From e20875de028fc801613849bb8006759655d811eb Mon Sep 17 00:00:00 2001 From: Yatin Gupta Date: Sun, 31 Mar 2024 17:59:07 -0700 Subject: [PATCH] chore(deps): Update open api generator in python sdk client to 7.4.0 Signed-off-by: Yatin Gupta --- sdks/python/Makefile | 2 +- sdks/python/client/argo_workflows/__init__.py | 425 +- .../client/argo_workflows/api/__init__.py | 17 +- .../api/archived_workflow_service_api.py | 3159 +- .../api/artifact_service_api.py | 2269 +- .../cluster_workflow_template_service_api.py | 2714 +- .../api/cron_workflow_service_api.py | 3728 +- .../argo_workflows/api/event_service_api.py | 1035 +- .../api/event_source_service_api.py | 3610 +- .../argo_workflows/api/info_service_api.py | 1482 +- .../argo_workflows/api/sensor_service_api.py | 3610 +- .../api/workflow_service_api.py | 8630 ++- .../api/workflow_template_service_api.py | 2877 +- .../client/argo_workflows/api_client.py | 1164 +- .../client/argo_workflows/api_response.py | 21 + .../client/argo_workflows/apis/__init__.py | 26 - .../client/argo_workflows/configuration.py | 132 +- .../client/argo_workflows/exceptions.py | 100 +- .../client/argo_workflows/model/__init__.py | 5 - .../client/argo_workflows/model/affinity.py | 273 - .../aws_elastic_block_store_volume_source.py | 273 - .../model/azure_disk_volume_source.py | 283 - .../model/azure_file_volume_source.py | 271 - .../argo_workflows/model/capabilities.py | 259 - .../model/ceph_fs_volume_source.py | 287 - .../model/cinder_volume_source.py | 279 - .../model/config_map_env_source.py | 259 - .../model/config_map_key_selector.py | 269 - .../model/config_map_projection.py | 269 - .../model/config_map_volume_source.py | 273 - .../client/argo_workflows/model/container.py | 376 - .../argo_workflows/model/container_port.py | 282 - .../argo_workflows/model/create_options.py | 263 - .../argo_workflows/model/csi_volume_source.py | 283 - .../model/downward_api_projection.py | 261 - .../model/downward_api_volume_file.py | 281 - 
.../model/downward_api_volume_source.py | 265 - .../client/argo_workflows/model/duration.py | 255 - .../model/empty_dir_volume_source.py | 259 - .../argo_workflows/model/env_from_source.py | 271 - .../client/argo_workflows/model/env_var.py | 275 - .../argo_workflows/model/env_var_source.py | 279 - .../model/ephemeral_volume_source.py | 261 - .../client/argo_workflows/model/event.py | 339 - .../argo_workflows/model/event_series.py | 259 - .../argo_workflows/model/event_source.py | 259 - ...eventsource_create_event_source_request.py | 265 - .../eventsource_event_source_watch_event.py | 265 - .../model/eventsource_log_entry.py | 279 - ...eventsource_update_event_source_request.py | 269 - .../argo_workflows/model/exec_action.py | 255 - .../argo_workflows/model/fc_volume_source.py | 271 - .../model/flex_volume_source.py | 283 - .../model/flocker_volume_source.py | 259 - .../gce_persistent_disk_volume_source.py | 273 - .../model/git_repo_volume_source.py | 269 - .../model/glusterfs_volume_source.py | 271 - .../model/google_protobuf_any.py | 264 - .../model/group_version_resource.py | 263 - .../argo_workflows/model/grpc_action.py | 265 - .../model/grpc_gateway_runtime_error.py | 273 - .../grpc_gateway_runtime_stream_error.py | 277 - .../client/argo_workflows/model/host_alias.py | 259 - .../model/host_path_volume_source.py | 265 - .../argo_workflows/model/http_get_action.py | 287 - .../argo_workflows/model/http_header.py | 267 - .../io_argoproj_events_v1alpha1_amount.py | 260 - ...roj_events_v1alpha1_amqp_consume_config.py | 271 - ...oproj_events_v1alpha1_amqp_event_source.py | 333 - ...s_v1alpha1_amqp_exchange_declare_config.py | 267 - ..._events_v1alpha1_amqp_queue_bind_config.py | 255 - ...ents_v1alpha1_amqp_queue_declare_config.py | 275 - ...j_events_v1alpha1_argo_workflow_trigger.py | 275 - ...oproj_events_v1alpha1_artifact_location.py | 295 - ...proj_events_v1alpha1_aws_lambda_trigger.py | 291 - ...vents_v1alpha1_azure_event_hubs_trigger.py | 283 - 
..._v1alpha1_azure_events_hub_event_source.py | 283 - ...alpha1_azure_queue_storage_event_source.py | 295 - ...v1alpha1_azure_service_bus_event_source.py | 297 - ...ents_v1alpha1_azure_service_bus_trigger.py | 289 - .../io_argoproj_events_v1alpha1_backoff.py | 275 - .../io_argoproj_events_v1alpha1_basic_auth.py | 265 - ...argoproj_events_v1alpha1_bitbucket_auth.py | 267 - ...oj_events_v1alpha1_bitbucket_basic_auth.py | 265 - ..._events_v1alpha1_bitbucket_event_source.py | 303 - ...oj_events_v1alpha1_bitbucket_repository.py | 259 - ..._v1alpha1_bitbucket_server_event_source.py | 313 - ...ts_v1alpha1_bitbucket_server_repository.py | 259 - ...j_events_v1alpha1_calendar_event_source.py | 287 - ...j_events_v1alpha1_catchup_configuration.py | 259 - .../io_argoproj_events_v1alpha1_condition.py | 271 - ...vents_v1alpha1_conditions_reset_by_time.py | 259 - ...ents_v1alpha1_conditions_reset_criteria.py | 261 - ..._events_v1alpha1_config_map_persistence.py | 259 - ...argoproj_events_v1alpha1_custom_trigger.py | 287 - ...io_argoproj_events_v1alpha1_data_filter.py | 271 - ..._argoproj_events_v1alpha1_email_trigger.py | 295 - ...oj_events_v1alpha1_emitter_event_source.py | 303 - ..._argoproj_events_v1alpha1_event_context.py | 279 - ...goproj_events_v1alpha1_event_dependency.py | 283 - ...events_v1alpha1_event_dependency_filter.py | 291 - ...s_v1alpha1_event_dependency_transformer.py | 259 - ...oproj_events_v1alpha1_event_persistence.py | 267 - ...o_argoproj_events_v1alpha1_event_source.py | 273 - ...roj_events_v1alpha1_event_source_filter.py | 255 - ...oproj_events_v1alpha1_event_source_list.py | 267 - ...oproj_events_v1alpha1_event_source_spec.py | 461 - ...roj_events_v1alpha1_event_source_status.py | 261 - ...io_argoproj_events_v1alpha1_expr_filter.py | 265 - ..._argoproj_events_v1alpha1_file_artifact.py | 255 - ...oproj_events_v1alpha1_file_event_source.py | 279 - ...oj_events_v1alpha1_generic_event_source.py | 287 - ...roj_events_v1alpha1_gerrit_event_source.py | 301 - 
...o_argoproj_events_v1alpha1_git_artifact.py | 301 - .../io_argoproj_events_v1alpha1_git_creds.py | 265 - ...oproj_events_v1alpha1_git_remote_config.py | 259 - ...goproj_events_v1alpha1_github_app_creds.py | 269 - ...roj_events_v1alpha1_github_event_source.py | 337 - ...roj_events_v1alpha1_gitlab_event_source.py | 309 - ...oproj_events_v1alpha1_hdfs_event_source.py | 315 - ...o_argoproj_events_v1alpha1_http_trigger.py | 299 - ...rgoproj_events_v1alpha1_int64_or_string.py | 263 - ...oj_events_v1alpha1_k8_s_resource_policy.py | 269 - ...oj_events_v1alpha1_kafka_consumer_group.py | 263 - ...proj_events_v1alpha1_kafka_event_source.py | 317 - ..._argoproj_events_v1alpha1_kafka_trigger.py | 315 - ...io_argoproj_events_v1alpha1_log_trigger.py | 255 - .../io_argoproj_events_v1alpha1_metadata.py | 259 - ...oproj_events_v1alpha1_mqtt_event_source.py | 299 - .../io_argoproj_events_v1alpha1_nats_auth.py | 275 - ...proj_events_v1alpha1_nats_events_source.py | 295 - ...o_argoproj_events_v1alpha1_nats_trigger.py | 279 - ...goproj_events_v1alpha1_nsq_event_source.py | 293 - ...proj_events_v1alpha1_open_whisk_trigger.py | 287 - ...proj_events_v1alpha1_owned_repositories.py | 259 - ..._argoproj_events_v1alpha1_payload_field.py | 259 - ...oj_events_v1alpha1_pub_sub_event_source.py | 295 - ...roj_events_v1alpha1_pulsar_event_source.py | 319 - ...argoproj_events_v1alpha1_pulsar_trigger.py | 311 - .../io_argoproj_events_v1alpha1_rate_limit.py | 259 - ...proj_events_v1alpha1_redis_event_source.py | 301 - ...ents_v1alpha1_redis_stream_event_source.py | 301 - .../io_argoproj_events_v1alpha1_resource.py | 260 - ...j_events_v1alpha1_resource_event_source.py | 279 - ...rgoproj_events_v1alpha1_resource_filter.py | 277 - ...io_argoproj_events_v1alpha1_s3_artifact.py | 301 - .../io_argoproj_events_v1alpha1_s3_bucket.py | 259 - .../io_argoproj_events_v1alpha1_s3_filter.py | 259 - ...io_argoproj_events_v1alpha1_sasl_config.py | 269 - ..._events_v1alpha1_schema_registry_config.py | 269 - 
..._argoproj_events_v1alpha1_secure_header.py | 265 - .../io_argoproj_events_v1alpha1_selector.py | 263 - .../io_argoproj_events_v1alpha1_sensor.py | 273 - ...io_argoproj_events_v1alpha1_sensor_list.py | 267 - ...io_argoproj_events_v1alpha1_sensor_spec.py | 293 - ..._argoproj_events_v1alpha1_sensor_status.py | 261 - .../io_argoproj_events_v1alpha1_service.py | 265 - ...oproj_events_v1alpha1_sftp_event_source.py | 297 - ...proj_events_v1alpha1_slack_event_source.py | 281 - ...o_argoproj_events_v1alpha1_slack_sender.py | 259 - ...o_argoproj_events_v1alpha1_slack_thread.py | 259 - ..._argoproj_events_v1alpha1_slack_trigger.py | 295 - ...goproj_events_v1alpha1_sns_event_source.py | 301 - ...goproj_events_v1alpha1_sqs_event_source.py | 311 - ...j_events_v1alpha1_standard_k8_s_trigger.py | 279 - .../io_argoproj_events_v1alpha1_status.py | 261 - ..._argoproj_events_v1alpha1_status_policy.py | 255 - ...ents_v1alpha1_storage_grid_event_source.py | 297 - ...roj_events_v1alpha1_storage_grid_filter.py | 259 - ...roj_events_v1alpha1_stripe_event_source.py | 279 - .../io_argoproj_events_v1alpha1_template.py | 313 - ...io_argoproj_events_v1alpha1_time_filter.py | 259 - .../io_argoproj_events_v1alpha1_tls_config.py | 273 - .../io_argoproj_events_v1alpha1_trigger.py | 289 - ...oproj_events_v1alpha1_trigger_parameter.py | 269 - ...vents_v1alpha1_trigger_parameter_source.py | 279 - ...argoproj_events_v1alpha1_trigger_policy.py | 267 - ...goproj_events_v1alpha1_trigger_template.py | 353 - ...o_argoproj_events_v1alpha1_url_artifact.py | 259 - ...oproj_events_v1alpha1_value_from_source.py | 267 - ...oproj_events_v1alpha1_watch_path_config.py | 263 - ...rgoproj_events_v1alpha1_webhook_context.py | 293 - ...oj_events_v1alpha1_webhook_event_source.py | 267 - ...proj_workflow_v1alpha1_archive_strategy.py | 269 - ...io_argoproj_workflow_v1alpha1_arguments.py | 267 - ...rgoproj_workflow_v1alpha1_art_gc_status.py | 263 - .../io_argoproj_workflow_v1alpha1_artifact.py | 371 - 
..._argoproj_workflow_v1alpha1_artifact_gc.py | 269 - ...proj_workflow_v1alpha1_artifact_gc_spec.py | 261 - ...oj_workflow_v1alpha1_artifact_gc_status.py | 261 - ...roj_workflow_v1alpha1_artifact_location.py | 313 - ...oj_workflow_v1alpha1_artifact_node_spec.py | 267 - ...goproj_workflow_v1alpha1_artifact_paths.py | 371 - ...j_workflow_v1alpha1_artifact_repository.py | 295 - ...rkflow_v1alpha1_artifact_repository_ref.py | 259 - ...v1alpha1_artifact_repository_ref_status.py | 277 - ...oproj_workflow_v1alpha1_artifact_result.py | 269 - ...ow_v1alpha1_artifact_result_node_status.py | 261 - ..._workflow_v1alpha1_artifactory_artifact.py | 275 - ...1alpha1_artifactory_artifact_repository.py | 273 - ...goproj_workflow_v1alpha1_azure_artifact.py | 287 - ...flow_v1alpha1_azure_artifact_repository.py | 285 - .../io_argoproj_workflow_v1alpha1_backoff.py | 263 - ...o_argoproj_workflow_v1alpha1_basic_auth.py | 265 - .../io_argoproj_workflow_v1alpha1_cache.py | 267 - ...proj_workflow_v1alpha1_client_cert_auth.py | 265 - ...flow_v1alpha1_cluster_workflow_template.py | 283 - ...luster_workflow_template_create_request.py | 267 - ..._cluster_workflow_template_lint_request.py | 267 - ...v1alpha1_cluster_workflow_template_list.py | 283 - ...luster_workflow_template_update_request.py | 265 - ...workflow_v1alpha1_collect_event_request.py | 255 - .../io_argoproj_workflow_v1alpha1_column.py | 273 - ...io_argoproj_workflow_v1alpha1_condition.py | 263 - ...goproj_workflow_v1alpha1_container_node.py | 371 - ...w_v1alpha1_container_set_retry_strategy.py | 265 - ...orkflow_v1alpha1_container_set_template.py | 279 - ..._argoproj_workflow_v1alpha1_continue_on.py | 259 - .../io_argoproj_workflow_v1alpha1_counter.py | 261 - ...w_v1alpha1_create_cron_workflow_request.py | 271 - ...kflow_v1alpha1_create_s3_bucket_options.py | 255 - ...rgoproj_workflow_v1alpha1_cron_workflow.py | 289 - ...oj_workflow_v1alpha1_cron_workflow_list.py | 283 - ...w_v1alpha1_cron_workflow_resume_request.py | 259 - 
...oj_workflow_v1alpha1_cron_workflow_spec.py | 313 - ..._workflow_v1alpha1_cron_workflow_status.py | 299 - ..._v1alpha1_cron_workflow_suspend_request.py | 259 - .../io_argoproj_workflow_v1alpha1_dag_task.py | 329 - ...argoproj_workflow_v1alpha1_dag_template.py | 275 - .../io_argoproj_workflow_v1alpha1_data.py | 275 - ..._argoproj_workflow_v1alpha1_data_source.py | 261 - .../io_argoproj_workflow_v1alpha1_event.py | 261 - ...oproj_workflow_v1alpha1_executor_config.py | 255 - .../io_argoproj_workflow_v1alpha1_gauge.py | 271 - ...argoproj_workflow_v1alpha1_gcs_artifact.py | 275 - ...rkflow_v1alpha1_gcs_artifact_repository.py | 269 - ...orkflow_v1alpha1_get_user_info_response.py | 283 - ...argoproj_workflow_v1alpha1_git_artifact.py | 307 - ...rgoproj_workflow_v1alpha1_hdfs_artifact.py | 305 - ...kflow_v1alpha1_hdfs_artifact_repository.py | 299 - .../io_argoproj_workflow_v1alpha1_header.py | 267 - ...io_argoproj_workflow_v1alpha1_histogram.py | 267 - .../io_argoproj_workflow_v1alpha1_http.py | 297 - ...rgoproj_workflow_v1alpha1_http_artifact.py | 277 - ...io_argoproj_workflow_v1alpha1_http_auth.py | 273 - ...proj_workflow_v1alpha1_http_body_source.py | 260 - ..._argoproj_workflow_v1alpha1_http_header.py | 275 - ...oj_workflow_v1alpha1_http_header_source.py | 261 - ...rgoproj_workflow_v1alpha1_info_response.py | 279 - .../io_argoproj_workflow_v1alpha1_inputs.py | 267 - ...o_argoproj_workflow_v1alpha1_label_keys.py | 255 - ...proj_workflow_v1alpha1_label_value_from.py | 261 - ...argoproj_workflow_v1alpha1_label_values.py | 255 - ...goproj_workflow_v1alpha1_lifecycle_hook.py | 275 - .../io_argoproj_workflow_v1alpha1_link.py | 273 - ...low_v1alpha1_lint_cron_workflow_request.py | 265 - ...io_argoproj_workflow_v1alpha1_log_entry.py | 259 - ...rgoproj_workflow_v1alpha1_manifest_from.py | 267 - ...oj_workflow_v1alpha1_memoization_status.py | 273 - .../io_argoproj_workflow_v1alpha1_memoize.py | 279 - .../io_argoproj_workflow_v1alpha1_metadata.py | 259 - 
...argoproj_workflow_v1alpha1_metric_label.py | 267 - .../io_argoproj_workflow_v1alpha1_metrics.py | 267 - .../io_argoproj_workflow_v1alpha1_mutex.py | 259 - ...rgoproj_workflow_v1alpha1_mutex_holding.py | 259 - ...argoproj_workflow_v1alpha1_mutex_status.py | 265 - ...io_argoproj_workflow_v1alpha1_node_flag.py | 259 - ..._argoproj_workflow_v1alpha1_node_result.py | 273 - ..._argoproj_workflow_v1alpha1_node_status.py | 377 - ...ow_v1alpha1_node_synchronization_status.py | 255 - ...argoproj_workflow_v1alpha1_o_auth2_auth.py | 279 - ...orkflow_v1alpha1_o_auth2_endpoint_param.py | 265 - ...argoproj_workflow_v1alpha1_oss_artifact.py | 301 - ...rkflow_v1alpha1_oss_artifact_repository.py | 295 - ...oj_workflow_v1alpha1_oss_lifecycle_rule.py | 259 - .../io_argoproj_workflow_v1alpha1_outputs.py | 275 - ...goproj_workflow_v1alpha1_parallel_steps.py | 283 - ...io_argoproj_workflow_v1alpha1_parameter.py | 291 - .../io_argoproj_workflow_v1alpha1_pod_gc.py | 271 - ...o_argoproj_workflow_v1alpha1_prometheus.py | 299 - ...argoproj_workflow_v1alpha1_raw_artifact.py | 261 - ...roj_workflow_v1alpha1_resource_template.py | 295 - ...pha1_resubmit_archived_workflow_request.py | 271 - ...goproj_workflow_v1alpha1_retry_affinity.py | 255 - ...1alpha1_retry_archived_workflow_request.py | 275 - ...goproj_workflow_v1alpha1_retry_strategy.py | 279 - ..._argoproj_workflow_v1alpha1_s3_artifact.py | 309 - ...orkflow_v1alpha1_s3_artifact_repository.py | 313 - ...workflow_v1alpha1_s3_encryption_options.py | 273 - ...oproj_workflow_v1alpha1_script_template.py | 373 - ...roj_workflow_v1alpha1_semaphore_holding.py | 259 - ...rgoproj_workflow_v1alpha1_semaphore_ref.py | 265 - ...proj_workflow_v1alpha1_semaphore_status.py | 265 - .../io_argoproj_workflow_v1alpha1_sequence.py | 267 - ...rgoproj_workflow_v1alpha1_stop_strategy.py | 261 - .../io_argoproj_workflow_v1alpha1_submit.py | 279 - ..._argoproj_workflow_v1alpha1_submit_opts.py | 305 - ...proj_workflow_v1alpha1_suspend_template.py | 255 - 
...oproj_workflow_v1alpha1_synchronization.py | 267 - ...orkflow_v1alpha1_synchronization_status.py | 267 - ...argoproj_workflow_v1alpha1_tar_strategy.py | 255 - .../io_argoproj_workflow_v1alpha1_template.py | 459 - ...argoproj_workflow_v1alpha1_template_ref.py | 263 - ...j_workflow_v1alpha1_transformation_step.py | 261 - ...argoproj_workflow_v1alpha1_ttl_strategy.py | 263 - ...w_v1alpha1_update_cron_workflow_request.py | 269 - ...goproj_workflow_v1alpha1_user_container.py | 371 - ...o_argoproj_workflow_v1alpha1_value_from.py | 293 - .../io_argoproj_workflow_v1alpha1_version.py | 303 - ...oproj_workflow_v1alpha1_volume_claim_gc.py | 255 - .../io_argoproj_workflow_v1alpha1_workflow.py | 289 - ...rkflow_v1alpha1_workflow_create_request.py | 279 - ...orkflow_v1alpha1_workflow_event_binding.py | 283 - ...ow_v1alpha1_workflow_event_binding_list.py | 283 - ...ow_v1alpha1_workflow_event_binding_spec.py | 273 - ...low_v1alpha1_workflow_level_artifact_gc.py | 277 - ...workflow_v1alpha1_workflow_lint_request.py | 265 - ...rgoproj_workflow_v1alpha1_workflow_list.py | 283 - ...roj_workflow_v1alpha1_workflow_metadata.py | 269 - ...flow_v1alpha1_workflow_resubmit_request.py | 267 - ...rkflow_v1alpha1_workflow_resume_request.py | 263 - ...orkflow_v1alpha1_workflow_retry_request.py | 271 - ..._workflow_v1alpha1_workflow_set_request.py | 275 - ...rgoproj_workflow_v1alpha1_workflow_spec.py | 471 - ...oproj_workflow_v1alpha1_workflow_status.py | 349 - ...rgoproj_workflow_v1alpha1_workflow_step.py | 315 - ...workflow_v1alpha1_workflow_stop_request.py | 267 - ...rkflow_v1alpha1_workflow_submit_request.py | 273 - ...kflow_v1alpha1_workflow_suspend_request.py | 259 - ...orkflow_v1alpha1_workflow_task_set_spec.py | 261 - ...kflow_v1alpha1_workflow_task_set_status.py | 261 - ...roj_workflow_v1alpha1_workflow_template.py | 283 - ...alpha1_workflow_template_create_request.py | 271 - ...v1alpha1_workflow_template_lint_request.py | 271 - ...orkflow_v1alpha1_workflow_template_list.py | 283 - 
...workflow_v1alpha1_workflow_template_ref.py | 259 - ...alpha1_workflow_template_update_request.py | 269 - ...low_v1alpha1_workflow_terminate_request.py | 259 - ..._workflow_v1alpha1_workflow_watch_event.py | 265 - ...pi_policy_v1_pod_disruption_budget_spec.py | 269 - .../model/iscsi_volume_source.py | 311 - .../argo_workflows/model/key_to_path.py | 271 - .../argo_workflows/model/label_selector.py | 265 - .../model/label_selector_requirement.py | 271 - .../client/argo_workflows/model/lifecycle.py | 265 - .../argo_workflows/model/lifecycle_handler.py | 273 - .../client/argo_workflows/model/list_meta.py | 267 - .../model/local_object_reference.py | 255 - .../model/managed_fields_entry.py | 279 - .../argo_workflows/model/nfs_volume_source.py | 271 - .../argo_workflows/model/node_affinity.py | 267 - .../argo_workflows/model/node_selector.py | 267 - .../model/node_selector_requirement.py | 279 - .../model/node_selector_term.py | 265 - .../model/object_field_selector.py | 265 - .../argo_workflows/model/object_meta.py | 323 - .../argo_workflows/model/object_reference.py | 279 - .../argo_workflows/model/owner_reference.py | 287 - .../model/persistent_volume_claim.py | 281 - .../persistent_volume_claim_condition.py | 287 - .../model/persistent_volume_claim_spec.py | 293 - .../model/persistent_volume_claim_status.py | 286 - .../model/persistent_volume_claim_template.py | 273 - .../persistent_volume_claim_volume_source.py | 265 - .../photon_persistent_disk_volume_source.py | 265 - .../argo_workflows/model/pod_affinity.py | 267 - .../argo_workflows/model/pod_affinity_term.py | 279 - .../argo_workflows/model/pod_anti_affinity.py | 267 - .../argo_workflows/model/pod_dns_config.py | 269 - .../model/pod_dns_config_option.py | 259 - .../model/pod_security_context.py | 303 - .../model/portworx_volume_source.py | 269 - .../model/preferred_scheduling_term.py | 273 - .../client/argo_workflows/model/probe.py | 303 - .../model/projected_volume_source.py | 265 - 
.../model/quobyte_volume_source.py | 283 - .../argo_workflows/model/rbd_volume_source.py | 297 - .../model/resource_field_selector.py | 269 - .../model/resource_requirements.py | 259 - .../model/scale_io_volume_source.py | 307 - .../argo_workflows/model/se_linux_options.py | 267 - .../argo_workflows/model/seccomp_profile.py | 270 - .../argo_workflows/model/secret_env_source.py | 259 - .../model/secret_key_selector.py | 269 - .../argo_workflows/model/secret_projection.py | 269 - .../model/secret_volume_source.py | 273 - .../argo_workflows/model/security_context.py | 307 - .../model/sensor_create_sensor_request.py | 271 - .../argo_workflows/model/sensor_log_entry.py | 283 - .../model/sensor_sensor_watch_event.py | 265 - .../model/sensor_update_sensor_request.py | 269 - .../model/service_account_token_projection.py | 269 - .../argo_workflows/model/service_port.py | 286 - .../argo_workflows/model/status_cause.py | 263 - .../model/storage_os_volume_source.py | 277 - .../model/stream_result_of_event.py | 267 - ...of_eventsource_event_source_watch_event.py | 267 - .../stream_result_of_eventsource_log_entry.py | 267 - ...io_argoproj_workflow_v1alpha1_log_entry.py | 267 - ..._workflow_v1alpha1_workflow_watch_event.py | 267 - .../stream_result_of_sensor_log_entry.py | 267 - ...eam_result_of_sensor_sensor_watch_event.py | 267 - .../client/argo_workflows/model/sysctl.py | 267 - .../argo_workflows/model/tcp_socket_action.py | 265 - .../client/argo_workflows/model/toleration.py | 280 - .../model/typed_local_object_reference.py | 271 - .../client/argo_workflows/model/volume.py | 439 - .../argo_workflows/model/volume_device.py | 267 - .../argo_workflows/model/volume_mount.py | 283 - .../argo_workflows/model/volume_projection.py | 279 - .../vsphere_virtual_disk_volume_source.py | 273 - .../model/weighted_pod_affinity_term.py | 273 - .../model/windows_security_context_options.py | 267 - .../client/argo_workflows/model_utils.py | 2037 - .../client/argo_workflows/models/__init__.py | 
809 +- .../client/argo_workflows/models/affinity.py | 103 + .../aws_elastic_block_store_volume_source.py | 93 + .../models/azure_disk_volume_source.py | 97 + .../models/azure_file_volume_source.py | 91 + .../argo_workflows/models/capabilities.py | 89 + .../models/ceph_fs_volume_source.py | 101 + .../models/cinder_volume_source.py | 97 + .../models/config_map_env_source.py | 89 + .../models/config_map_key_selector.py | 91 + .../models/config_map_projection.py | 99 + .../models/config_map_volume_source.py | 101 + .../client/argo_workflows/models/container.py | 211 + .../argo_workflows/models/container_port.py | 105 + .../argo_workflows/models/create_options.py | 91 + .../models/csi_volume_source.py | 99 + .../models/downward_api_projection.py | 95 + .../models/downward_api_volume_file.py | 101 + .../models/downward_api_volume_source.py | 97 + .../client/argo_workflows/models/duration.py | 87 + .../models/empty_dir_volume_source.py | 89 + .../argo_workflows/models/env_from_source.py | 99 + .../client/argo_workflows/models/env_var.py | 95 + .../argo_workflows/models/env_var_source.py | 109 + .../models/ephemeral_volume_source.py | 91 + .../client/argo_workflows/models/event.py | 139 + .../argo_workflows/models/event_series.py | 90 + .../argo_workflows/models/event_source.py | 89 + ...eventsource_create_event_source_request.py | 93 + .../eventsource_event_source_watch_event.py | 93 + .../models/eventsource_log_entry.py | 100 + ...eventsource_update_event_source_request.py | 95 + .../argo_workflows/models/exec_action.py | 87 + .../argo_workflows/models/fc_volume_source.py | 95 + .../models/flex_volume_source.py | 99 + .../models/flocker_volume_source.py | 89 + .../gce_persistent_disk_volume_source.py | 93 + .../models/git_repo_volume_source.py | 91 + .../models/glusterfs_volume_source.py | 91 + .../models/google_protobuf_any.py | 100 + .../models/group_version_resource.py | 91 + .../argo_workflows/models/grpc_action.py | 89 + .../models/grpc_gateway_runtime_error.py | 
101 + .../grpc_gateway_runtime_stream_error.py | 103 + .../argo_workflows/models/host_alias.py | 89 + .../models/host_path_volume_source.py | 89 + .../argo_workflows/models/http_get_action.py | 113 + .../argo_workflows/models/http_header.py | 89 + .../io_argoproj_events_v1alpha1_amount.py | 98 + ...roj_events_v1alpha1_amqp_consume_config.py | 95 + ...oproj_events_v1alpha1_amqp_event_source.py | 151 + ...s_v1alpha1_amqp_exchange_declare_config.py | 93 + ..._events_v1alpha1_amqp_queue_bind_config.py | 87 + ...ents_v1alpha1_amqp_queue_declare_config.py | 97 + ...j_events_v1alpha1_argo_workflow_trigger.py | 105 + ...oproj_events_v1alpha1_artifact_location.py | 123 + ...proj_events_v1alpha1_aws_lambda_trigger.py | 123 + ...vents_v1alpha1_azure_event_hubs_trigger.py | 119 + ..._v1alpha1_azure_events_hub_event_source.py | 108 + ...alpha1_azure_queue_storage_event_source.py | 111 + ...v1alpha1_azure_service_bus_event_source.py | 115 + ...ents_v1alpha1_azure_service_bus_trigger.py | 122 + .../io_argoproj_events_v1alpha1_backoff.py | 104 + .../io_argoproj_events_v1alpha1_basic_auth.py | 96 + ...argoproj_events_v1alpha1_bitbucket_auth.py | 97 + ...oj_events_v1alpha1_bitbucket_basic_auth.py | 96 + ..._events_v1alpha1_bitbucket_event_source.py | 125 + ...oj_events_v1alpha1_bitbucket_repository.py | 89 + ..._v1alpha1_bitbucket_server_event_source.py | 136 + ...ts_v1alpha1_bitbucket_server_repository.py | 89 + ...j_events_v1alpha1_calendar_event_source.py | 107 + ...j_events_v1alpha1_catchup_configuration.py | 89 + .../io_argoproj_events_v1alpha1_condition.py | 96 + ...vents_v1alpha1_conditions_reset_by_time.py | 89 + ...ents_v1alpha1_conditions_reset_criteria.py | 91 + ..._events_v1alpha1_config_map_persistence.py | 89 + ...argoproj_events_v1alpha1_custom_trigger.py | 118 + ...io_argoproj_events_v1alpha1_data_filter.py | 95 + ..._argoproj_events_v1alpha1_email_trigger.py | 115 + ...oj_events_v1alpha1_emitter_event_source.py | 124 + ..._argoproj_events_v1alpha1_event_context.py | 
100 + ...goproj_events_v1alpha1_event_dependency.py | 105 + ...events_v1alpha1_event_dependency_filter.py | 123 + ...s_v1alpha1_event_dependency_transformer.py | 89 + ...oproj_events_v1alpha1_event_persistence.py | 97 + ...o_argoproj_events_v1alpha1_event_source.py | 103 + ...roj_events_v1alpha1_event_source_filter.py | 87 + ...oproj_events_v1alpha1_event_source_list.py | 101 + ...oproj_events_v1alpha1_event_source_spec.py | 566 + ...roj_events_v1alpha1_event_source_status.py | 91 + ...io_argoproj_events_v1alpha1_expr_filter.py | 97 + ..._argoproj_events_v1alpha1_file_artifact.py | 87 + ...oproj_events_v1alpha1_file_event_source.py | 103 + ...oj_events_v1alpha1_generic_event_source.py | 107 + ...roj_events_v1alpha1_gerrit_event_source.py | 117 + ...o_argoproj_events_v1alpha1_git_artifact.py | 117 + .../io_argoproj_events_v1alpha1_git_creds.py | 96 + ...oproj_events_v1alpha1_git_remote_config.py | 89 + ...goproj_events_v1alpha1_github_app_creds.py | 95 + ...roj_events_v1alpha1_github_event_source.py | 148 + ...roj_events_v1alpha1_gitlab_event_source.py | 124 + ...oproj_events_v1alpha1_hdfs_event_source.py | 130 + ...o_argoproj_events_v1alpha1_http_trigger.py | 134 + ...rgoproj_events_v1alpha1_int64_or_string.py | 91 + ...oj_events_v1alpha1_k8_s_resource_policy.py | 95 + ...oj_events_v1alpha1_kafka_consumer_group.py | 91 + ...proj_events_v1alpha1_kafka_event_source.py | 131 + ..._argoproj_events_v1alpha1_kafka_trigger.py | 138 + ...io_argoproj_events_v1alpha1_log_trigger.py | 87 + .../io_argoproj_events_v1alpha1_metadata.py | 89 + ...oproj_events_v1alpha1_mqtt_event_source.py | 119 + .../io_argoproj_events_v1alpha1_nats_auth.py | 107 + ...proj_events_v1alpha1_nats_events_source.py | 117 + ...o_argoproj_events_v1alpha1_nats_trigger.py | 114 + ...goproj_events_v1alpha1_nsq_event_source.py | 113 + ...proj_events_v1alpha1_open_whisk_trigger.py | 118 + ...proj_events_v1alpha1_owned_repositories.py | 89 + ..._argoproj_events_v1alpha1_payload_field.py | 89 + 
...oj_events_v1alpha1_pub_sub_event_source.py | 111 + ...roj_events_v1alpha1_pulsar_event_source.py | 135 + ...argoproj_events_v1alpha1_pulsar_trigger.py | 142 + .../io_argoproj_events_v1alpha1_rate_limit.py | 89 + ...proj_events_v1alpha1_redis_event_source.py | 117 + ...ents_v1alpha1_redis_stream_event_source.py | 117 + .../io_argoproj_events_v1alpha1_resource.py | 98 + ...j_events_v1alpha1_resource_event_source.py | 103 + ...rgoproj_events_v1alpha1_resource_filter.py | 111 + ...io_argoproj_events_v1alpha1_s3_artifact.py | 123 + .../io_argoproj_events_v1alpha1_s3_bucket.py | 89 + .../io_argoproj_events_v1alpha1_s3_filter.py | 89 + ...io_argoproj_events_v1alpha1_sasl_config.py | 98 + ..._events_v1alpha1_schema_registry_config.py | 95 + ..._argoproj_events_v1alpha1_secure_header.py | 93 + .../io_argoproj_events_v1alpha1_selector.py | 91 + .../io_argoproj_events_v1alpha1_sensor.py | 103 + ...io_argoproj_events_v1alpha1_sensor_list.py | 101 + ...io_argoproj_events_v1alpha1_sensor_spec.py | 121 + ..._argoproj_events_v1alpha1_sensor_status.py | 91 + .../io_argoproj_events_v1alpha1_service.py | 97 + ...oproj_events_v1alpha1_sftp_event_source.py | 124 + ...proj_events_v1alpha1_slack_event_source.py | 110 + ...o_argoproj_events_v1alpha1_slack_sender.py | 89 + ...o_argoproj_events_v1alpha1_slack_thread.py | 89 + ..._argoproj_events_v1alpha1_slack_trigger.py | 121 + ...goproj_events_v1alpha1_sns_event_source.py | 120 + ...goproj_events_v1alpha1_sqs_event_source.py | 125 + ...j_events_v1alpha1_standard_k8_s_trigger.py | 107 + .../io_argoproj_events_v1alpha1_status.py | 95 + ..._argoproj_events_v1alpha1_status_policy.py | 87 + ...ents_v1alpha1_storage_grid_event_source.py | 115 + ...roj_events_v1alpha1_storage_grid_filter.py | 89 + ...roj_events_v1alpha1_stripe_event_source.py | 103 + .../io_argoproj_events_v1alpha1_template.py | 147 + ...io_argoproj_events_v1alpha1_time_filter.py | 89 + .../io_argoproj_events_v1alpha1_tls_config.py | 103 + 
.../io_argoproj_events_v1alpha1_trigger.py | 121 + ...oproj_events_v1alpha1_trigger_parameter.py | 95 + ...vents_v1alpha1_trigger_parameter_source.py | 99 + ...argoproj_events_v1alpha1_trigger_policy.py | 97 + ...goproj_events_v1alpha1_trigger_template.py | 183 + ...o_argoproj_events_v1alpha1_url_artifact.py | 89 + ...oproj_events_v1alpha1_value_from_source.py | 97 + ...oproj_events_v1alpha1_watch_path_config.py | 91 + ...rgoproj_events_v1alpha1_webhook_context.py | 113 + ...oj_events_v1alpha1_webhook_event_source.py | 97 + ...proj_workflow_v1alpha1_archive_strategy.py | 95 + ...io_argoproj_workflow_v1alpha1_arguments.py | 105 + ...rgoproj_workflow_v1alpha1_art_gc_status.py | 91 + .../io_argoproj_workflow_v1alpha1_artifact.py | 173 + ..._argoproj_workflow_v1alpha1_artifact_gc.py | 95 + ...proj_workflow_v1alpha1_artifact_gc_spec.py | 100 + ...oj_workflow_v1alpha1_artifact_gc_status.py | 100 + ...roj_workflow_v1alpha1_artifact_location.py | 141 + ...oj_workflow_v1alpha1_artifact_node_spec.py | 106 + ...goproj_workflow_v1alpha1_artifact_paths.py | 173 + ...j_workflow_v1alpha1_artifact_repository.py | 123 + ...rkflow_v1alpha1_artifact_repository_ref.py | 89 + ...v1alpha1_artifact_repository_ref_status.py | 99 + ...oproj_workflow_v1alpha1_artifact_result.py | 91 + ...ow_v1alpha1_artifact_result_node_status.py | 100 + ..._workflow_v1alpha1_artifactory_artifact.py | 98 + ...1alpha1_artifactory_artifact_repository.py | 100 + ...goproj_workflow_v1alpha1_azure_artifact.py | 99 + ...flow_v1alpha1_azure_artifact_repository.py | 99 + .../io_argoproj_workflow_v1alpha1_backoff.py | 91 + ...o_argoproj_workflow_v1alpha1_basic_auth.py | 96 + .../io_argoproj_workflow_v1alpha1_cache.py | 91 + ...proj_workflow_v1alpha1_client_cert_auth.py | 96 + ...flow_v1alpha1_cluster_workflow_template.py | 101 + ...luster_workflow_template_create_request.py | 97 + ..._cluster_workflow_template_lint_request.py | 97 + ...v1alpha1_cluster_workflow_template_list.py | 105 + 
...luster_workflow_template_update_request.py | 93 + ...workflow_v1alpha1_collect_event_request.py | 87 + .../io_argoproj_workflow_v1alpha1_column.py | 91 + ...io_argoproj_workflow_v1alpha1_condition.py | 91 + ...goproj_workflow_v1alpha1_container_node.py | 193 + ...w_v1alpha1_container_set_retry_strategy.py | 89 + ...orkflow_v1alpha1_container_set_template.py | 111 + ..._argoproj_workflow_v1alpha1_continue_on.py | 89 + .../io_argoproj_workflow_v1alpha1_counter.py | 87 + ...w_v1alpha1_create_cron_workflow_request.py | 99 + ...kflow_v1alpha1_create_s3_bucket_options.py | 87 + ...rgoproj_workflow_v1alpha1_cron_workflow.py | 107 + ...oj_workflow_v1alpha1_cron_workflow_list.py | 105 + ...w_v1alpha1_cron_workflow_resume_request.py | 89 + ...oj_workflow_v1alpha1_cron_workflow_spec.py | 119 + ..._workflow_v1alpha1_cron_workflow_status.py | 114 + ..._v1alpha1_cron_workflow_suspend_request.py | 89 + .../io_argoproj_workflow_v1alpha1_dag_task.py | 148 + ...argoproj_workflow_v1alpha1_dag_template.py | 101 + .../io_argoproj_workflow_v1alpha1_data.py | 101 + ..._argoproj_workflow_v1alpha1_data_source.py | 91 + .../io_argoproj_workflow_v1alpha1_event.py | 87 + ...oproj_workflow_v1alpha1_executor_config.py | 87 + .../io_argoproj_workflow_v1alpha1_gauge.py | 91 + ...argoproj_workflow_v1alpha1_gcs_artifact.py | 95 + ...rkflow_v1alpha1_gcs_artifact_repository.py | 95 + ...orkflow_v1alpha1_get_user_info_response.py | 101 + ...argoproj_workflow_v1alpha1_git_artifact.py | 117 + ...rgoproj_workflow_v1alpha1_hdfs_artifact.py | 116 + ...kflow_v1alpha1_hdfs_artifact_repository.py | 116 + .../io_argoproj_workflow_v1alpha1_header.py | 89 + ...io_argoproj_workflow_v1alpha1_histogram.py | 89 + .../io_argoproj_workflow_v1alpha1_http.py | 113 + ...rgoproj_workflow_v1alpha1_http_artifact.py | 103 + ...io_argoproj_workflow_v1alpha1_http_auth.py | 103 + ...proj_workflow_v1alpha1_http_body_source.py | 98 + ..._argoproj_workflow_v1alpha1_http_header.py | 95 + 
...oj_workflow_v1alpha1_http_header_source.py | 91 + ...rgoproj_workflow_v1alpha1_info_response.py | 111 + .../io_argoproj_workflow_v1alpha1_inputs.py | 105 + ...o_argoproj_workflow_v1alpha1_label_keys.py | 87 + ...proj_workflow_v1alpha1_label_value_from.py | 87 + ...argoproj_workflow_v1alpha1_label_values.py | 87 + ...goproj_workflow_v1alpha1_lifecycle_hook.py | 101 + .../io_argoproj_workflow_v1alpha1_link.py | 91 + ...low_v1alpha1_lint_cron_workflow_request.py | 93 + ...io_argoproj_workflow_v1alpha1_log_entry.py | 89 + ...rgoproj_workflow_v1alpha1_manifest_from.py | 91 + ...oj_workflow_v1alpha1_memoization_status.py | 91 + .../io_argoproj_workflow_v1alpha1_memoize.py | 95 + .../io_argoproj_workflow_v1alpha1_metadata.py | 89 + ...argoproj_workflow_v1alpha1_metric_label.py | 89 + .../io_argoproj_workflow_v1alpha1_metrics.py | 95 + .../io_argoproj_workflow_v1alpha1_mutex.py | 89 + ...rgoproj_workflow_v1alpha1_mutex_holding.py | 89 + ...argoproj_workflow_v1alpha1_mutex_status.py | 104 + ...io_argoproj_workflow_v1alpha1_node_flag.py | 89 + ..._argoproj_workflow_v1alpha1_node_result.py | 97 + ..._argoproj_workflow_v1alpha1_node_status.py | 160 + ...ow_v1alpha1_node_synchronization_status.py | 87 + ...argoproj_workflow_v1alpha1_o_auth2_auth.py | 113 + ...orkflow_v1alpha1_o_auth2_endpoint_param.py | 89 + ...argoproj_workflow_v1alpha1_oss_artifact.py | 114 + ...rkflow_v1alpha1_oss_artifact_repository.py | 114 + ...oj_workflow_v1alpha1_oss_lifecycle_rule.py | 89 + .../io_argoproj_workflow_v1alpha1_outputs.py | 109 + ...goproj_workflow_v1alpha1_parallel_steps.py | 86 + ...io_argoproj_workflow_v1alpha1_parameter.py | 103 + .../io_argoproj_workflow_v1alpha1_pod_gc.py | 99 + ...o_argoproj_workflow_v1alpha1_prometheus.py | 119 + ...argoproj_workflow_v1alpha1_raw_artifact.py | 87 + ...roj_workflow_v1alpha1_resource_template.py | 105 + ...pha1_resubmit_archived_workflow_request.py | 95 + ...goproj_workflow_v1alpha1_retry_affinity.py | 87 + 
...1alpha1_retry_archived_workflow_request.py | 97 + ...goproj_workflow_v1alpha1_retry_strategy.py | 103 + ..._argoproj_workflow_v1alpha1_s3_artifact.py | 127 + ...orkflow_v1alpha1_s3_artifact_repository.py | 129 + ...workflow_v1alpha1_s3_encryption_options.py | 97 + ...oproj_workflow_v1alpha1_script_template.py | 193 + ...roj_workflow_v1alpha1_semaphore_holding.py | 89 + ...rgoproj_workflow_v1alpha1_semaphore_ref.py | 93 + ...proj_workflow_v1alpha1_semaphore_status.py | 104 + .../io_argoproj_workflow_v1alpha1_sequence.py | 93 + ...rgoproj_workflow_v1alpha1_stop_strategy.py | 87 + .../io_argoproj_workflow_v1alpha1_submit.py | 103 + ..._argoproj_workflow_v1alpha1_submit_opts.py | 113 + ...proj_workflow_v1alpha1_suspend_template.py | 87 + ...oproj_workflow_v1alpha1_synchronization.py | 97 + ...orkflow_v1alpha1_synchronization_status.py | 97 + ...argoproj_workflow_v1alpha1_tar_strategy.py | 87 + .../io_argoproj_workflow_v1alpha1_template.py | 288 + ...argoproj_workflow_v1alpha1_template_ref.py | 91 + ...j_workflow_v1alpha1_transformation_step.py | 87 + ...argoproj_workflow_v1alpha1_ttl_strategy.py | 91 + ...w_v1alpha1_update_cron_workflow_request.py | 95 + ...goproj_workflow_v1alpha1_user_container.py | 193 + ...o_argoproj_workflow_v1alpha1_value_from.py | 107 + .../io_argoproj_workflow_v1alpha1_version.py | 101 + ...oproj_workflow_v1alpha1_volume_claim_gc.py | 87 + .../io_argoproj_workflow_v1alpha1_workflow.py | 107 + ...rkflow_v1alpha1_workflow_create_request.py | 103 + ...orkflow_v1alpha1_workflow_event_binding.py | 101 + ...ow_v1alpha1_workflow_event_binding_list.py | 105 + ...ow_v1alpha1_workflow_event_binding_spec.py | 97 + ...low_v1alpha1_workflow_level_artifact_gc.py | 99 + ...workflow_v1alpha1_workflow_lint_request.py | 93 + ...rgoproj_workflow_v1alpha1_workflow_list.py | 105 + ...roj_workflow_v1alpha1_workflow_metadata.py | 104 + ...flow_v1alpha1_workflow_resubmit_request.py | 93 + ...rkflow_v1alpha1_workflow_resume_request.py | 91 + 
...orkflow_v1alpha1_workflow_retry_request.py | 95 + ..._workflow_v1alpha1_workflow_set_request.py | 97 + ...rgoproj_workflow_v1alpha1_workflow_spec.py | 301 + ...oproj_workflow_v1alpha1_workflow_status.py | 186 + ...rgoproj_workflow_v1alpha1_workflow_step.py | 142 + ...workflow_v1alpha1_workflow_stop_request.py | 93 + ...rkflow_v1alpha1_workflow_submit_request.py | 97 + ...kflow_v1alpha1_workflow_suspend_request.py | 89 + ...orkflow_v1alpha1_workflow_task_set_spec.py | 100 + ...kflow_v1alpha1_workflow_task_set_status.py | 100 + ...roj_workflow_v1alpha1_workflow_template.py | 101 + ...alpha1_workflow_template_create_request.py | 99 + ...v1alpha1_workflow_template_lint_request.py | 99 + ...orkflow_v1alpha1_workflow_template_list.py | 105 + ...workflow_v1alpha1_workflow_template_ref.py | 89 + ...alpha1_workflow_template_update_request.py | 95 + ...low_v1alpha1_workflow_terminate_request.py | 89 + ..._workflow_v1alpha1_workflow_watch_event.py | 93 + ...pi_policy_v1_pod_disruption_budget_spec.py | 95 + .../models/iscsi_volume_source.py | 111 + .../argo_workflows/models/key_to_path.py | 91 + .../argo_workflows/models/label_selector.py | 97 + .../models/label_selector_requirement.py | 91 + .../client/argo_workflows/models/lifecycle.py | 96 + .../models/lifecycle_handler.py | 103 + .../client/argo_workflows/models/list_meta.py | 93 + .../models/local_object_reference.py | 87 + .../models/managed_fields_entry.py | 100 + .../models/nfs_volume_source.py | 91 + .../argo_workflows/models/node_affinity.py | 101 + .../argo_workflows/models/node_selector.py | 95 + .../models/node_selector_requirement.py | 98 + .../models/node_selector_term.py | 104 + .../models/object_field_selector.py | 89 + .../argo_workflows/models/object_meta.py | 134 + .../argo_workflows/models/object_reference.py | 99 + .../argo_workflows/models/owner_reference.py | 97 + .../models/persistent_volume_claim.py | 107 + .../persistent_volume_claim_condition.py | 105 + .../models/persistent_volume_claim_spec.py 
| 116 + .../models/persistent_volume_claim_status.py | 115 + .../persistent_volume_claim_template.py | 97 + .../persistent_volume_claim_volume_source.py | 89 + .../photon_persistent_disk_volume_source.py | 89 + .../argo_workflows/models/pod_affinity.py | 105 + .../models/pod_affinity_term.py | 100 + .../models/pod_anti_affinity.py | 105 + .../argo_workflows/models/pod_dns_config.py | 99 + .../models/pod_dns_config_option.py | 89 + .../models/pod_security_context.py | 125 + .../models/portworx_volume_source.py | 91 + .../models/preferred_scheduling_term.py | 93 + .../client/argo_workflows/models/probe.py | 121 + .../models/projected_volume_source.py | 97 + .../models/quobyte_volume_source.py | 97 + .../models/rbd_volume_source.py | 105 + .../models/resource_field_selector.py | 91 + .../models/resource_requirements.py | 89 + .../models/scale_io_volume_source.py | 109 + .../argo_workflows/models/se_linux_options.py | 93 + .../argo_workflows/models/seccomp_profile.py | 96 + .../models/secret_env_source.py | 89 + .../models/secret_key_selector.py | 91 + .../models/secret_projection.py | 99 + .../models/secret_volume_source.py | 101 + .../argo_workflows/models/security_context.py | 123 + .../models/sensor_create_sensor_request.py | 99 + .../argo_workflows/models/sensor_log_entry.py | 102 + .../models/sensor_sensor_watch_event.py | 93 + .../models/sensor_update_sensor_request.py | 95 + .../service_account_token_projection.py | 91 + .../argo_workflows/models/service_port.py | 107 + .../argo_workflows/models/status_cause.py | 91 + .../models/storage_os_volume_source.py | 99 + .../models/stream_result_of_event.py | 97 + ...of_eventsource_event_source_watch_event.py | 97 + .../stream_result_of_eventsource_log_entry.py | 97 + ...io_argoproj_workflow_v1alpha1_log_entry.py | 97 + ..._workflow_v1alpha1_workflow_watch_event.py | 97 + .../stream_result_of_sensor_log_entry.py | 97 + ...eam_result_of_sensor_sensor_watch_event.py | 97 + .../client/argo_workflows/models/sysctl.py | 89 
+ .../models/tcp_socket_action.py | 89 + .../argo_workflows/models/toleration.py | 115 + .../models/typed_local_object_reference.py | 91 + .../client/argo_workflows/models/volume.py | 261 + .../argo_workflows/models/volume_device.py | 89 + .../argo_workflows/models/volume_mount.py | 97 + .../models/volume_projection.py | 109 + .../vsphere_virtual_disk_volume_source.py | 93 + .../models/weighted_pod_affinity_term.py | 93 + .../windows_security_context_options.py | 93 + sdks/python/client/argo_workflows/py.typed | 0 sdks/python/client/argo_workflows/rest.py | 367 +- .../docs/AWSElasticBlockStoreVolumeSource.md | 21 +- sdks/python/client/docs/Affinity.md | 19 +- .../client/docs/ArchivedWorkflowServiceApi.md | 300 +- sdks/python/client/docs/ArtifactServiceApi.md | 183 +- .../client/docs/AzureDiskVolumeSource.md | 21 +- .../client/docs/AzureFileVolumeSource.md | 21 +- sdks/python/client/docs/CSIVolumeSource.md | 21 +- sdks/python/client/docs/Capabilities.md | 23 +- sdks/python/client/docs/CephFSVolumeSource.md | 21 +- sdks/python/client/docs/CinderVolumeSource.md | 21 +- .../docs/ClusterWorkflowTemplateServiceApi.md | 32228 +-------- sdks/python/client/docs/ConfigMapEnvSource.md | 19 +- .../client/docs/ConfigMapKeySelector.md | 19 +- .../python/client/docs/ConfigMapProjection.md | 21 +- .../client/docs/ConfigMapVolumeSource.md | 21 +- sdks/python/client/docs/Container.md | 33 +- sdks/python/client/docs/ContainerPort.md | 19 +- sdks/python/client/docs/CreateOptions.md | 21 +- .../client/docs/CronWorkflowServiceApi.md | 32571 +-------- .../client/docs/DownwardAPIProjection.md | 21 +- .../client/docs/DownwardAPIVolumeFile.md | 21 +- .../client/docs/DownwardAPIVolumeSource.md | 21 +- sdks/python/client/docs/Duration.md | 19 +- .../client/docs/EmptyDirVolumeSource.md | 19 +- sdks/python/client/docs/EnvFromSource.md | 19 +- sdks/python/client/docs/EnvVar.md | 19 +- sdks/python/client/docs/EnvVarSource.md | 19 +- .../client/docs/EphemeralVolumeSource.md | 19 +- 
sdks/python/client/docs/Event.md | 23 +- sdks/python/client/docs/EventSeries.md | 19 +- sdks/python/client/docs/EventServiceApi.md | 96 +- sdks/python/client/docs/EventSource.md | 19 +- .../client/docs/EventSourceServiceApi.md | 5201 +- .../EventsourceCreateEventSourceRequest.md | 19 +- .../docs/EventsourceEventSourceWatchEvent.md | 19 +- .../python/client/docs/EventsourceLogEntry.md | 19 +- .../EventsourceUpdateEventSourceRequest.md | 19 +- sdks/python/client/docs/ExecAction.md | 21 +- sdks/python/client/docs/FCVolumeSource.md | 23 +- sdks/python/client/docs/FlexVolumeSource.md | 21 +- .../python/client/docs/FlockerVolumeSource.md | 19 +- .../docs/GCEPersistentDiskVolumeSource.md | 21 +- sdks/python/client/docs/GRPCAction.md | 19 +- .../python/client/docs/GitRepoVolumeSource.md | 21 +- .../client/docs/GlusterfsVolumeSource.md | 19 +- sdks/python/client/docs/GoogleProtobufAny.md | 21 +- .../client/docs/GroupVersionResource.md | 19 +- .../client/docs/GrpcGatewayRuntimeError.md | 21 +- .../docs/GrpcGatewayRuntimeStreamError.md | 21 +- sdks/python/client/docs/HTTPGetAction.md | 23 +- sdks/python/client/docs/HTTPHeader.md | 19 +- sdks/python/client/docs/HostAlias.md | 21 +- .../client/docs/HostPathVolumeSource.md | 19 +- sdks/python/client/docs/ISCSIVolumeSource.md | 27 +- sdks/python/client/docs/InfoServiceApi.md | 82 +- ...ArgoprojEventsV1alpha1AMQPConsumeConfig.md | 19 +- ...IoArgoprojEventsV1alpha1AMQPEventSource.md | 21 +- ...EventsV1alpha1AMQPExchangeDeclareConfig.md | 19 +- ...goprojEventsV1alpha1AMQPQueueBindConfig.md | 19 +- ...rojEventsV1alpha1AMQPQueueDeclareConfig.md | 19 +- ...oArgoprojEventsV1alpha1AWSLambdaTrigger.md | 23 +- .../docs/IoArgoprojEventsV1alpha1Amount.md | 21 +- ...goprojEventsV1alpha1ArgoWorkflowTrigger.md | 23 +- ...oArgoprojEventsV1alpha1ArtifactLocation.md | 19 +- ...projEventsV1alpha1AzureEventHubsTrigger.md | 23 +- ...EventsV1alpha1AzureEventsHubEventSource.md | 21 +- ...ntsV1alpha1AzureQueueStorageEventSource.md | 21 +- 
...ventsV1alpha1AzureServiceBusEventSource.md | 21 +- ...rojEventsV1alpha1AzureServiceBusTrigger.md | 23 +- .../docs/IoArgoprojEventsV1alpha1Backoff.md | 19 +- .../docs/IoArgoprojEventsV1alpha1BasicAuth.md | 19 +- .../IoArgoprojEventsV1alpha1BitbucketAuth.md | 19 +- ...rgoprojEventsV1alpha1BitbucketBasicAuth.md | 19 +- ...oprojEventsV1alpha1BitbucketEventSource.md | 25 +- ...goprojEventsV1alpha1BitbucketRepository.md | 19 +- ...ventsV1alpha1BitbucketServerEventSource.md | 25 +- ...EventsV1alpha1BitbucketServerRepository.md | 19 +- ...goprojEventsV1alpha1CalendarEventSource.md | 23 +- ...oprojEventsV1alpha1CatchupConfiguration.md | 19 +- .../docs/IoArgoprojEventsV1alpha1Condition.md | 19 +- ...projEventsV1alpha1ConditionsResetByTime.md | 19 +- ...ojEventsV1alpha1ConditionsResetCriteria.md | 19 +- ...oprojEventsV1alpha1ConfigMapPersistence.md | 19 +- .../IoArgoprojEventsV1alpha1CustomTrigger.md | 25 +- .../IoArgoprojEventsV1alpha1DataFilter.md | 21 +- .../IoArgoprojEventsV1alpha1EmailTrigger.md | 25 +- ...rgoprojEventsV1alpha1EmitterEventSource.md | 21 +- .../IoArgoprojEventsV1alpha1EventContext.md | 19 +- ...IoArgoprojEventsV1alpha1EventDependency.md | 19 +- ...projEventsV1alpha1EventDependencyFilter.md | 23 +- ...ventsV1alpha1EventDependencyTransformer.md | 19 +- ...oArgoprojEventsV1alpha1EventPersistence.md | 19 +- .../IoArgoprojEventsV1alpha1EventSource.md | 19 +- ...ArgoprojEventsV1alpha1EventSourceFilter.md | 19 +- ...IoArgoprojEventsV1alpha1EventSourceList.md | 21 +- ...IoArgoprojEventsV1alpha1EventSourceSpec.md | 81 +- ...ArgoprojEventsV1alpha1EventSourceStatus.md | 19 +- .../IoArgoprojEventsV1alpha1ExprFilter.md | 21 +- .../IoArgoprojEventsV1alpha1FileArtifact.md | 19 +- ...IoArgoprojEventsV1alpha1FileEventSource.md | 21 +- ...rgoprojEventsV1alpha1GenericEventSource.md | 21 +- ...ArgoprojEventsV1alpha1GerritEventSource.md | 25 +- .../IoArgoprojEventsV1alpha1GitArtifact.md | 19 +- .../docs/IoArgoprojEventsV1alpha1GitCreds.md | 19 +- 
...IoArgoprojEventsV1alpha1GitRemoteConfig.md | 21 +- .../IoArgoprojEventsV1alpha1GithubAppCreds.md | 19 +- ...ArgoprojEventsV1alpha1GithubEventSource.md | 27 +- ...ArgoprojEventsV1alpha1GitlabEventSource.md | 27 +- ...IoArgoprojEventsV1alpha1HDFSEventSource.md | 23 +- .../IoArgoprojEventsV1alpha1HTTPTrigger.md | 27 +- .../IoArgoprojEventsV1alpha1Int64OrString.md | 19 +- ...ArgoprojEventsV1alpha1K8SResourcePolicy.md | 21 +- ...rgoprojEventsV1alpha1KafkaConsumerGroup.md | 19 +- ...oArgoprojEventsV1alpha1KafkaEventSource.md | 21 +- .../IoArgoprojEventsV1alpha1KafkaTrigger.md | 23 +- .../IoArgoprojEventsV1alpha1LogTrigger.md | 19 +- ...IoArgoprojEventsV1alpha1MQTTEventSource.md | 21 +- .../docs/IoArgoprojEventsV1alpha1Metadata.md | 23 +- .../docs/IoArgoprojEventsV1alpha1NATSAuth.md | 19 +- ...oArgoprojEventsV1alpha1NATSEventsSource.md | 21 +- .../IoArgoprojEventsV1alpha1NATSTrigger.md | 23 +- .../IoArgoprojEventsV1alpha1NSQEventSource.md | 21 +- ...oArgoprojEventsV1alpha1OpenWhiskTrigger.md | 23 +- ...ArgoprojEventsV1alpha1OwnedRepositories.md | 21 +- .../IoArgoprojEventsV1alpha1PayloadField.md | 19 +- ...ArgoprojEventsV1alpha1PubSubEventSource.md | 21 +- ...ArgoprojEventsV1alpha1PulsarEventSource.md | 25 +- .../IoArgoprojEventsV1alpha1PulsarTrigger.md | 25 +- .../docs/IoArgoprojEventsV1alpha1RateLimit.md | 19 +- ...oArgoprojEventsV1alpha1RedisEventSource.md | 23 +- ...rojEventsV1alpha1RedisStreamEventSource.md | 23 +- .../docs/IoArgoprojEventsV1alpha1Resource.md | 21 +- ...goprojEventsV1alpha1ResourceEventSource.md | 23 +- .../IoArgoprojEventsV1alpha1ResourceFilter.md | 23 +- .../IoArgoprojEventsV1alpha1S3Artifact.md | 23 +- .../docs/IoArgoprojEventsV1alpha1S3Bucket.md | 19 +- .../docs/IoArgoprojEventsV1alpha1S3Filter.md | 19 +- .../IoArgoprojEventsV1alpha1SASLConfig.md | 19 +- ...IoArgoprojEventsV1alpha1SFTPEventSource.md | 21 +- .../IoArgoprojEventsV1alpha1SNSEventSource.md | 21 +- .../IoArgoprojEventsV1alpha1SQSEventSource.md | 21 +- 
...oprojEventsV1alpha1SchemaRegistryConfig.md | 19 +- .../IoArgoprojEventsV1alpha1SecureHeader.md | 19 +- .../docs/IoArgoprojEventsV1alpha1Selector.md | 19 +- .../docs/IoArgoprojEventsV1alpha1Sensor.md | 19 +- .../IoArgoprojEventsV1alpha1SensorList.md | 21 +- .../IoArgoprojEventsV1alpha1SensorSpec.md | 25 +- .../IoArgoprojEventsV1alpha1SensorStatus.md | 19 +- .../docs/IoArgoprojEventsV1alpha1Service.md | 21 +- ...oArgoprojEventsV1alpha1SlackEventSource.md | 21 +- .../IoArgoprojEventsV1alpha1SlackSender.md | 19 +- .../IoArgoprojEventsV1alpha1SlackThread.md | 19 +- .../IoArgoprojEventsV1alpha1SlackTrigger.md | 21 +- ...rgoprojEventsV1alpha1StandardK8STrigger.md | 21 +- .../docs/IoArgoprojEventsV1alpha1Status.md | 21 +- .../IoArgoprojEventsV1alpha1StatusPolicy.md | 21 +- ...rojEventsV1alpha1StorageGridEventSource.md | 23 +- ...ArgoprojEventsV1alpha1StorageGridFilter.md | 19 +- ...ArgoprojEventsV1alpha1StripeEventSource.md | 23 +- .../docs/IoArgoprojEventsV1alpha1TLSConfig.md | 19 +- .../docs/IoArgoprojEventsV1alpha1Template.md | 27 +- .../IoArgoprojEventsV1alpha1TimeFilter.md | 19 +- .../docs/IoArgoprojEventsV1alpha1Trigger.md | 21 +- ...oArgoprojEventsV1alpha1TriggerParameter.md | 19 +- ...rojEventsV1alpha1TriggerParameterSource.md | 19 +- .../IoArgoprojEventsV1alpha1TriggerPolicy.md | 19 +- ...IoArgoprojEventsV1alpha1TriggerTemplate.md | 21 +- .../IoArgoprojEventsV1alpha1URLArtifact.md | 19 +- ...IoArgoprojEventsV1alpha1ValueFromSource.md | 19 +- ...IoArgoprojEventsV1alpha1WatchPathConfig.md | 19 +- .../IoArgoprojEventsV1alpha1WebhookContext.md | 21 +- ...rgoprojEventsV1alpha1WebhookEventSource.md | 19 +- ...ArgoprojWorkflowV1alpha1ArchiveStrategy.md | 23 +- .../IoArgoprojWorkflowV1alpha1Arguments.md | 23 +- .../IoArgoprojWorkflowV1alpha1ArtGCStatus.md | 23 +- .../IoArgoprojWorkflowV1alpha1Artifact.md | 23 +- .../IoArgoprojWorkflowV1alpha1ArtifactGC.md | 19 +- ...oArgoprojWorkflowV1alpha1ArtifactGCSpec.md | 21 +- ...rgoprojWorkflowV1alpha1ArtifactGCStatus.md | 21 +- 
...rgoprojWorkflowV1alpha1ArtifactLocation.md | 19 +- ...rgoprojWorkflowV1alpha1ArtifactNodeSpec.md | 21 +- ...IoArgoprojWorkflowV1alpha1ArtifactPaths.md | 23 +- ...oprojWorkflowV1alpha1ArtifactRepository.md | 19 +- ...ojWorkflowV1alpha1ArtifactRepositoryRef.md | 19 +- ...flowV1alpha1ArtifactRepositoryRefStatus.md | 19 +- ...oArgoprojWorkflowV1alpha1ArtifactResult.md | 21 +- ...orkflowV1alpha1ArtifactResultNodeStatus.md | 21 +- ...projWorkflowV1alpha1ArtifactoryArtifact.md | 21 +- ...owV1alpha1ArtifactoryArtifactRepository.md | 19 +- ...IoArgoprojWorkflowV1alpha1AzureArtifact.md | 21 +- ...WorkflowV1alpha1AzureArtifactRepository.md | 23 +- .../docs/IoArgoprojWorkflowV1alpha1Backoff.md | 19 +- .../IoArgoprojWorkflowV1alpha1BasicAuth.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Cache.md | 19 +- ...oArgoprojWorkflowV1alpha1ClientCertAuth.md | 19 +- ...WorkflowV1alpha1ClusterWorkflowTemplate.md | 23 +- ...ha1ClusterWorkflowTemplateCreateRequest.md | 19 +- ...lpha1ClusterWorkflowTemplateLintRequest.md | 19 +- ...flowV1alpha1ClusterWorkflowTemplateList.md | 23 +- ...ha1ClusterWorkflowTemplateUpdateRequest.md | 19 +- ...projWorkflowV1alpha1CollectEventRequest.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Column.md | 19 +- .../IoArgoprojWorkflowV1alpha1Condition.md | 19 +- ...IoArgoprojWorkflowV1alpha1ContainerNode.md | 37 +- ...rkflowV1alpha1ContainerSetRetryStrategy.md | 21 +- ...rojWorkflowV1alpha1ContainerSetTemplate.md | 23 +- .../IoArgoprojWorkflowV1alpha1ContinueOn.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Counter.md | 19 +- ...rkflowV1alpha1CreateCronWorkflowRequest.md | 19 +- ...ojWorkflowV1alpha1CreateS3BucketOptions.md | 19 +- .../IoArgoprojWorkflowV1alpha1CronWorkflow.md | 23 +- ...rgoprojWorkflowV1alpha1CronWorkflowList.md | 23 +- ...rkflowV1alpha1CronWorkflowResumeRequest.md | 19 +- ...rgoprojWorkflowV1alpha1CronWorkflowSpec.md | 25 +- ...oprojWorkflowV1alpha1CronWorkflowStatus.md | 23 +- ...kflowV1alpha1CronWorkflowSuspendRequest.md | 19 +- 
.../docs/IoArgoprojWorkflowV1alpha1DAGTask.md | 27 +- .../IoArgoprojWorkflowV1alpha1DAGTemplate.md | 21 +- .../docs/IoArgoprojWorkflowV1alpha1Data.md | 21 +- .../IoArgoprojWorkflowV1alpha1DataSource.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Event.md | 19 +- ...oArgoprojWorkflowV1alpha1ExecutorConfig.md | 19 +- .../IoArgoprojWorkflowV1alpha1GCSArtifact.md | 21 +- ...ojWorkflowV1alpha1GCSArtifactRepository.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Gauge.md | 21 +- ...projWorkflowV1alpha1GetUserInfoResponse.md | 21 +- .../IoArgoprojWorkflowV1alpha1GitArtifact.md | 23 +- .../IoArgoprojWorkflowV1alpha1HDFSArtifact.md | 23 +- ...jWorkflowV1alpha1HDFSArtifactRepository.md | 21 +- .../docs/IoArgoprojWorkflowV1alpha1HTTP.md | 23 +- .../IoArgoprojWorkflowV1alpha1HTTPArtifact.md | 23 +- .../IoArgoprojWorkflowV1alpha1HTTPAuth.md | 19 +- ...oArgoprojWorkflowV1alpha1HTTPBodySource.md | 21 +- .../IoArgoprojWorkflowV1alpha1HTTPHeader.md | 19 +- ...rgoprojWorkflowV1alpha1HTTPHeaderSource.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Header.md | 19 +- .../IoArgoprojWorkflowV1alpha1Histogram.md | 21 +- .../IoArgoprojWorkflowV1alpha1InfoResponse.md | 25 +- .../docs/IoArgoprojWorkflowV1alpha1Inputs.md | 23 +- .../IoArgoprojWorkflowV1alpha1LabelKeys.md | 21 +- ...oArgoprojWorkflowV1alpha1LabelValueFrom.md | 19 +- .../IoArgoprojWorkflowV1alpha1LabelValues.md | 21 +- ...IoArgoprojWorkflowV1alpha1LifecycleHook.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Link.md | 19 +- ...WorkflowV1alpha1LintCronWorkflowRequest.md | 19 +- .../IoArgoprojWorkflowV1alpha1LogEntry.md | 19 +- .../IoArgoprojWorkflowV1alpha1ManifestFrom.md | 19 +- ...goprojWorkflowV1alpha1MemoizationStatus.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Memoize.md | 19 +- .../IoArgoprojWorkflowV1alpha1Metadata.md | 23 +- .../IoArgoprojWorkflowV1alpha1MetricLabel.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Metrics.md | 21 +- .../docs/IoArgoprojWorkflowV1alpha1Mutex.md | 19 +- .../IoArgoprojWorkflowV1alpha1MutexHolding.md 
| 19 +- .../IoArgoprojWorkflowV1alpha1MutexStatus.md | 23 +- .../IoArgoprojWorkflowV1alpha1NodeFlag.md | 19 +- .../IoArgoprojWorkflowV1alpha1NodeResult.md | 19 +- .../IoArgoprojWorkflowV1alpha1NodeStatus.md | 31 +- ...rkflowV1alpha1NodeSynchronizationStatus.md | 19 +- .../IoArgoprojWorkflowV1alpha1OAuth2Auth.md | 23 +- ...projWorkflowV1alpha1OAuth2EndpointParam.md | 19 +- .../IoArgoprojWorkflowV1alpha1OSSArtifact.md | 21 +- ...ojWorkflowV1alpha1OSSArtifactRepository.md | 19 +- ...rgoprojWorkflowV1alpha1OSSLifecycleRule.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Outputs.md | 23 +- ...IoArgoprojWorkflowV1alpha1ParallelSteps.md | 19 +- .../IoArgoprojWorkflowV1alpha1Parameter.md | 23 +- .../docs/IoArgoprojWorkflowV1alpha1PodGC.md | 19 +- .../IoArgoprojWorkflowV1alpha1Prometheus.md | 25 +- .../IoArgoprojWorkflowV1alpha1RawArtifact.md | 19 +- ...rgoprojWorkflowV1alpha1ResourceTemplate.md | 21 +- ...V1alpha1ResubmitArchivedWorkflowRequest.md | 21 +- ...IoArgoprojWorkflowV1alpha1RetryAffinity.md | 21 +- ...lowV1alpha1RetryArchivedWorkflowRequest.md | 21 +- ...IoArgoprojWorkflowV1alpha1RetryStrategy.md | 19 +- .../IoArgoprojWorkflowV1alpha1S3Artifact.md | 19 +- ...rojWorkflowV1alpha1S3ArtifactRepository.md | 19 +- ...projWorkflowV1alpha1S3EncryptionOptions.md | 19 +- ...oArgoprojWorkflowV1alpha1ScriptTemplate.md | 35 +- ...rgoprojWorkflowV1alpha1SemaphoreHolding.md | 21 +- .../IoArgoprojWorkflowV1alpha1SemaphoreRef.md | 19 +- ...ArgoprojWorkflowV1alpha1SemaphoreStatus.md | 23 +- .../IoArgoprojWorkflowV1alpha1Sequence.md | 19 +- .../IoArgoprojWorkflowV1alpha1StopStrategy.md | 19 +- .../docs/IoArgoprojWorkflowV1alpha1Submit.md | 21 +- .../IoArgoprojWorkflowV1alpha1SubmitOpts.md | 21 +- ...ArgoprojWorkflowV1alpha1SuspendTemplate.md | 19 +- ...ArgoprojWorkflowV1alpha1Synchronization.md | 19 +- ...ojWorkflowV1alpha1SynchronizationStatus.md | 19 +- .../IoArgoprojWorkflowV1alpha1TTLStrategy.md | 19 +- .../IoArgoprojWorkflowV1alpha1TarStrategy.md | 19 +- 
.../IoArgoprojWorkflowV1alpha1Template.md | 35 +- .../IoArgoprojWorkflowV1alpha1TemplateRef.md | 19 +- ...oprojWorkflowV1alpha1TransformationStep.md | 19 +- ...rkflowV1alpha1UpdateCronWorkflowRequest.md | 19 +- ...IoArgoprojWorkflowV1alpha1UserContainer.md | 35 +- .../IoArgoprojWorkflowV1alpha1ValueFrom.md | 21 +- .../docs/IoArgoprojWorkflowV1alpha1Version.md | 19 +- ...IoArgoprojWorkflowV1alpha1VolumeClaimGC.md | 19 +- .../IoArgoprojWorkflowV1alpha1Workflow.md | 23 +- ...ojWorkflowV1alpha1WorkflowCreateRequest.md | 19 +- ...rojWorkflowV1alpha1WorkflowEventBinding.md | 23 +- ...orkflowV1alpha1WorkflowEventBindingList.md | 23 +- ...orkflowV1alpha1WorkflowEventBindingSpec.md | 19 +- ...WorkflowV1alpha1WorkflowLevelArtifactGC.md | 19 +- ...projWorkflowV1alpha1WorkflowLintRequest.md | 19 +- .../IoArgoprojWorkflowV1alpha1WorkflowList.md | 23 +- ...rgoprojWorkflowV1alpha1WorkflowMetadata.md | 25 +- ...WorkflowV1alpha1WorkflowResubmitRequest.md | 21 +- ...ojWorkflowV1alpha1WorkflowResumeRequest.md | 19 +- ...rojWorkflowV1alpha1WorkflowRetryRequest.md | 21 +- ...oprojWorkflowV1alpha1WorkflowSetRequest.md | 19 +- .../IoArgoprojWorkflowV1alpha1WorkflowSpec.md | 35 +- ...oArgoprojWorkflowV1alpha1WorkflowStatus.md | 31 +- .../IoArgoprojWorkflowV1alpha1WorkflowStep.md | 23 +- ...projWorkflowV1alpha1WorkflowStopRequest.md | 19 +- ...ojWorkflowV1alpha1WorkflowSubmitRequest.md | 19 +- ...jWorkflowV1alpha1WorkflowSuspendRequest.md | 19 +- ...projWorkflowV1alpha1WorkflowTaskSetSpec.md | 21 +- ...ojWorkflowV1alpha1WorkflowTaskSetStatus.md | 21 +- ...rgoprojWorkflowV1alpha1WorkflowTemplate.md | 23 +- ...owV1alpha1WorkflowTemplateCreateRequest.md | 19 +- ...flowV1alpha1WorkflowTemplateLintRequest.md | 19 +- ...rojWorkflowV1alpha1WorkflowTemplateList.md | 23 +- ...projWorkflowV1alpha1WorkflowTemplateRef.md | 19 +- ...owV1alpha1WorkflowTemplateUpdateRequest.md | 19 +- ...orkflowV1alpha1WorkflowTerminateRequest.md | 19 +- ...oprojWorkflowV1alpha1WorkflowWatchEvent.md | 19 +- 
...IoK8sApiPolicyV1PodDisruptionBudgetSpec.md | 19 +- sdks/python/client/docs/KeyToPath.md | 21 +- sdks/python/client/docs/LabelSelector.md | 23 +- .../client/docs/LabelSelectorRequirement.md | 21 +- sdks/python/client/docs/Lifecycle.md | 19 +- sdks/python/client/docs/LifecycleHandler.md | 21 +- sdks/python/client/docs/ListMeta.md | 21 +- .../client/docs/LocalObjectReference.md | 19 +- sdks/python/client/docs/ManagedFieldsEntry.md | 21 +- sdks/python/client/docs/NFSVolumeSource.md | 21 +- sdks/python/client/docs/NodeAffinity.md | 21 +- sdks/python/client/docs/NodeSelector.md | 21 +- .../client/docs/NodeSelectorRequirement.md | 21 +- sdks/python/client/docs/NodeSelectorTerm.md | 23 +- .../python/client/docs/ObjectFieldSelector.md | 21 +- sdks/python/client/docs/ObjectMeta.md | 29 +- sdks/python/client/docs/ObjectReference.md | 19 +- sdks/python/client/docs/OwnerReference.md | 23 +- .../client/docs/PersistentVolumeClaim.md | 19 +- .../docs/PersistentVolumeClaimCondition.md | 23 +- .../client/docs/PersistentVolumeClaimSpec.md | 21 +- .../docs/PersistentVolumeClaimStatus.md | 27 +- .../docs/PersistentVolumeClaimTemplate.md | 21 +- .../docs/PersistentVolumeClaimVolumeSource.md | 19 +- .../docs/PhotonPersistentDiskVolumeSource.md | 21 +- sdks/python/client/docs/PodAffinity.md | 23 +- sdks/python/client/docs/PodAffinityTerm.md | 23 +- sdks/python/client/docs/PodAntiAffinity.md | 23 +- sdks/python/client/docs/PodDNSConfig.md | 25 +- sdks/python/client/docs/PodDNSConfigOption.md | 19 +- sdks/python/client/docs/PodSecurityContext.md | 23 +- .../client/docs/PortworxVolumeSource.md | 21 +- .../client/docs/PreferredSchedulingTerm.md | 19 +- sdks/python/client/docs/Probe.md | 21 +- .../client/docs/ProjectedVolumeSource.md | 21 +- .../python/client/docs/QuobyteVolumeSource.md | 23 +- sdks/python/client/docs/RBDVolumeSource.md | 23 +- .../client/docs/ResourceFieldSelector.md | 21 +- .../client/docs/ResourceRequirements.md | 23 +- sdks/python/client/docs/SELinuxOptions.md | 19 +- 
.../python/client/docs/ScaleIOVolumeSource.md | 25 +- sdks/python/client/docs/SeccompProfile.md | 21 +- sdks/python/client/docs/SecretEnvSource.md | 19 +- sdks/python/client/docs/SecretKeySelector.md | 19 +- sdks/python/client/docs/SecretProjection.md | 21 +- sdks/python/client/docs/SecretVolumeSource.md | 21 +- sdks/python/client/docs/SecurityContext.md | 19 +- .../client/docs/SensorCreateSensorRequest.md | 19 +- sdks/python/client/docs/SensorLogEntry.md | 19 +- .../client/docs/SensorSensorWatchEvent.md | 19 +- sdks/python/client/docs/SensorServiceApi.md | 4230 +- .../client/docs/SensorUpdateSensorRequest.md | 19 +- .../docs/ServiceAccountTokenProjection.md | 21 +- sdks/python/client/docs/ServicePort.md | 21 +- sdks/python/client/docs/StatusCause.md | 19 +- .../client/docs/StorageOSVolumeSource.md | 19 +- .../python/client/docs/StreamResultOfEvent.md | 19 +- ...esultOfEventsourceEventSourceWatchEvent.md | 19 +- .../docs/StreamResultOfEventsourceLogEntry.md | 19 +- ...ultOfIoArgoprojWorkflowV1alpha1LogEntry.md | 19 +- ...oprojWorkflowV1alpha1WorkflowWatchEvent.md | 19 +- .../docs/StreamResultOfSensorLogEntry.md | 19 +- .../StreamResultOfSensorSensorWatchEvent.md | 19 +- sdks/python/client/docs/Sysctl.md | 19 +- sdks/python/client/docs/TCPSocketAction.md | 21 +- sdks/python/client/docs/Toleration.md | 19 +- .../client/docs/TypedLocalObjectReference.md | 21 +- sdks/python/client/docs/Volume.md | 21 +- sdks/python/client/docs/VolumeDevice.md | 19 +- sdks/python/client/docs/VolumeMount.md | 21 +- sdks/python/client/docs/VolumeProjection.md | 19 +- .../docs/VsphereVirtualDiskVolumeSource.md | 21 +- .../client/docs/WeightedPodAffinityTerm.md | 19 +- .../docs/WindowsSecurityContextOptions.md | 19 +- sdks/python/client/docs/WorkflowServiceApi.md | 55430 +--------------- .../client/docs/WorkflowTemplateServiceApi.md | 32266 +-------- sdks/python/client/requirements.txt | 4 +- sdks/python/client/setup.py | 29 +- sdks/python/flake.nix | 8 +- 1218 files changed, 74368 
insertions(+), 285976 deletions(-) create mode 100644 sdks/python/client/argo_workflows/api_response.py delete mode 100644 sdks/python/client/argo_workflows/apis/__init__.py delete mode 100644 sdks/python/client/argo_workflows/model/__init__.py delete mode 100644 sdks/python/client/argo_workflows/model/affinity.py delete mode 100644 sdks/python/client/argo_workflows/model/aws_elastic_block_store_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/azure_disk_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/azure_file_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/capabilities.py delete mode 100644 sdks/python/client/argo_workflows/model/ceph_fs_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/cinder_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/config_map_env_source.py delete mode 100644 sdks/python/client/argo_workflows/model/config_map_key_selector.py delete mode 100644 sdks/python/client/argo_workflows/model/config_map_projection.py delete mode 100644 sdks/python/client/argo_workflows/model/config_map_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/container.py delete mode 100644 sdks/python/client/argo_workflows/model/container_port.py delete mode 100644 sdks/python/client/argo_workflows/model/create_options.py delete mode 100644 sdks/python/client/argo_workflows/model/csi_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/downward_api_projection.py delete mode 100644 sdks/python/client/argo_workflows/model/downward_api_volume_file.py delete mode 100644 sdks/python/client/argo_workflows/model/downward_api_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/duration.py delete mode 100644 sdks/python/client/argo_workflows/model/empty_dir_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/env_from_source.py delete mode 100644 
sdks/python/client/argo_workflows/model/env_var.py delete mode 100644 sdks/python/client/argo_workflows/model/env_var_source.py delete mode 100644 sdks/python/client/argo_workflows/model/ephemeral_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/event.py delete mode 100644 sdks/python/client/argo_workflows/model/event_series.py delete mode 100644 sdks/python/client/argo_workflows/model/event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py delete mode 100644 sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py delete mode 100644 sdks/python/client/argo_workflows/model/eventsource_log_entry.py delete mode 100644 sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py delete mode 100644 sdks/python/client/argo_workflows/model/exec_action.py delete mode 100644 sdks/python/client/argo_workflows/model/fc_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/flex_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/flocker_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/gce_persistent_disk_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/git_repo_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/glusterfs_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/google_protobuf_any.py delete mode 100644 sdks/python/client/argo_workflows/model/group_version_resource.py delete mode 100644 sdks/python/client/argo_workflows/model/grpc_action.py delete mode 100644 sdks/python/client/argo_workflows/model/grpc_gateway_runtime_error.py delete mode 100644 sdks/python/client/argo_workflows/model/grpc_gateway_runtime_stream_error.py delete mode 100644 sdks/python/client/argo_workflows/model/host_alias.py delete mode 100644 sdks/python/client/argo_workflows/model/host_path_volume_source.py delete 
mode 100644 sdks/python/client/argo_workflows/model/http_get_action.py delete mode 100644 sdks/python/client/argo_workflows/model/http_header.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amount.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_consume_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_argo_workflow_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_artifact_location.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_aws_lambda_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_backoff.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_basic_auth.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_auth.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_calendar_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_condition.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_by_time.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_criteria.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_config_map_persistence.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_custom_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_data_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_email_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_emitter_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_context.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_transformer.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_persistence.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_list.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_expr_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_generic_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gerrit_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_creds.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_remote_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_app_creds.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_event_source.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gitlab_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_hdfs_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_http_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_int64_or_string.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_k8_s_resource_policy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_consumer_group.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_log_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_metadata.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_mqtt_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_auth.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_events_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nsq_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_open_whisk_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_owned_repositories.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_payload_field.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pub_sub_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_rate_limit.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_stream_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_bucket.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sasl_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_schema_registry_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_secure_header.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_selector.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_list.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_spec.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_service.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sftp_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_sender.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_thread.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sns_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sqs_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status_policy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_stripe_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_time_filter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_tls_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_policy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_url_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_value_from_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_watch_path_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_context.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_event_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_archive_strategy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_arguments.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_art_gc_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_node_spec.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_backoff.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_basic_auth.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cache.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_client_cert_auth.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_collect_event_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_column.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_condition.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_continue_on.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_counter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_list.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_status.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_task.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_executor_config.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gauge.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_header.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_histogram.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_auth.py delete mode 
100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_body_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header_source.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_info_response.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_inputs.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_keys.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_value_from.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_values.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_link.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_log_entry.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_manifest_from.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoization_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoize.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metadata.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metric_label.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metrics.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_holding.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_flag.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_result.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_synchronization_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_auth.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_outputs.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parallel_steps.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parameter.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_pod_gc.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_prometheus.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_raw_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_affinity.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_strategy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact_repository.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_encryption_options.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_holding.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_ref.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_sequence.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_stop_strategy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit_opts.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_suspend_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization_status.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_tar_strategy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template_ref.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_transformation_step.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_ttl_strategy.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_version.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_volume_claim_gc.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_create_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_lint_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_list.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_metadata.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resume_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_set_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_step.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_stop_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_submit_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_list.py delete mode 100644 
sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_ref.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py delete mode 100644 sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_watch_event.py delete mode 100644 sdks/python/client/argo_workflows/model/io_k8s_api_policy_v1_pod_disruption_budget_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/iscsi_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/key_to_path.py delete mode 100644 sdks/python/client/argo_workflows/model/label_selector.py delete mode 100644 sdks/python/client/argo_workflows/model/label_selector_requirement.py delete mode 100644 sdks/python/client/argo_workflows/model/lifecycle.py delete mode 100644 sdks/python/client/argo_workflows/model/lifecycle_handler.py delete mode 100644 sdks/python/client/argo_workflows/model/list_meta.py delete mode 100644 sdks/python/client/argo_workflows/model/local_object_reference.py delete mode 100644 sdks/python/client/argo_workflows/model/managed_fields_entry.py delete mode 100644 sdks/python/client/argo_workflows/model/nfs_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/node_affinity.py delete mode 100644 sdks/python/client/argo_workflows/model/node_selector.py delete mode 100644 sdks/python/client/argo_workflows/model/node_selector_requirement.py delete mode 100644 sdks/python/client/argo_workflows/model/node_selector_term.py delete mode 100644 sdks/python/client/argo_workflows/model/object_field_selector.py delete mode 100644 sdks/python/client/argo_workflows/model/object_meta.py delete mode 100644 sdks/python/client/argo_workflows/model/object_reference.py delete mode 100644 sdks/python/client/argo_workflows/model/owner_reference.py delete 
mode 100644 sdks/python/client/argo_workflows/model/persistent_volume_claim.py delete mode 100644 sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py delete mode 100644 sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py delete mode 100644 sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py delete mode 100644 sdks/python/client/argo_workflows/model/persistent_volume_claim_template.py delete mode 100644 sdks/python/client/argo_workflows/model/persistent_volume_claim_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/photon_persistent_disk_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/pod_affinity.py delete mode 100644 sdks/python/client/argo_workflows/model/pod_affinity_term.py delete mode 100644 sdks/python/client/argo_workflows/model/pod_anti_affinity.py delete mode 100644 sdks/python/client/argo_workflows/model/pod_dns_config.py delete mode 100644 sdks/python/client/argo_workflows/model/pod_dns_config_option.py delete mode 100644 sdks/python/client/argo_workflows/model/pod_security_context.py delete mode 100644 sdks/python/client/argo_workflows/model/portworx_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/preferred_scheduling_term.py delete mode 100644 sdks/python/client/argo_workflows/model/probe.py delete mode 100644 sdks/python/client/argo_workflows/model/projected_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/quobyte_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/rbd_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/resource_field_selector.py delete mode 100644 sdks/python/client/argo_workflows/model/resource_requirements.py delete mode 100644 sdks/python/client/argo_workflows/model/scale_io_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/se_linux_options.py delete mode 100644 
sdks/python/client/argo_workflows/model/seccomp_profile.py delete mode 100644 sdks/python/client/argo_workflows/model/secret_env_source.py delete mode 100644 sdks/python/client/argo_workflows/model/secret_key_selector.py delete mode 100644 sdks/python/client/argo_workflows/model/secret_projection.py delete mode 100644 sdks/python/client/argo_workflows/model/secret_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/security_context.py delete mode 100644 sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py delete mode 100644 sdks/python/client/argo_workflows/model/sensor_log_entry.py delete mode 100644 sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py delete mode 100644 sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py delete mode 100644 sdks/python/client/argo_workflows/model/service_account_token_projection.py delete mode 100644 sdks/python/client/argo_workflows/model/service_port.py delete mode 100644 sdks/python/client/argo_workflows/model/status_cause.py delete mode 100644 sdks/python/client/argo_workflows/model/storage_os_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/stream_result_of_event.py delete mode 100644 sdks/python/client/argo_workflows/model/stream_result_of_eventsource_event_source_watch_event.py delete mode 100644 sdks/python/client/argo_workflows/model/stream_result_of_eventsource_log_entry.py delete mode 100644 sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py delete mode 100644 sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py delete mode 100644 sdks/python/client/argo_workflows/model/stream_result_of_sensor_log_entry.py delete mode 100644 sdks/python/client/argo_workflows/model/stream_result_of_sensor_sensor_watch_event.py delete mode 100644 sdks/python/client/argo_workflows/model/sysctl.py delete mode 100644 
sdks/python/client/argo_workflows/model/tcp_socket_action.py delete mode 100644 sdks/python/client/argo_workflows/model/toleration.py delete mode 100644 sdks/python/client/argo_workflows/model/typed_local_object_reference.py delete mode 100644 sdks/python/client/argo_workflows/model/volume.py delete mode 100644 sdks/python/client/argo_workflows/model/volume_device.py delete mode 100644 sdks/python/client/argo_workflows/model/volume_mount.py delete mode 100644 sdks/python/client/argo_workflows/model/volume_projection.py delete mode 100644 sdks/python/client/argo_workflows/model/vsphere_virtual_disk_volume_source.py delete mode 100644 sdks/python/client/argo_workflows/model/weighted_pod_affinity_term.py delete mode 100644 sdks/python/client/argo_workflows/model/windows_security_context_options.py delete mode 100644 sdks/python/client/argo_workflows/model_utils.py create mode 100644 sdks/python/client/argo_workflows/models/affinity.py create mode 100644 sdks/python/client/argo_workflows/models/aws_elastic_block_store_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/azure_disk_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/azure_file_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/capabilities.py create mode 100644 sdks/python/client/argo_workflows/models/ceph_fs_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/cinder_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/config_map_env_source.py create mode 100644 sdks/python/client/argo_workflows/models/config_map_key_selector.py create mode 100644 sdks/python/client/argo_workflows/models/config_map_projection.py create mode 100644 sdks/python/client/argo_workflows/models/config_map_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/container.py create mode 100644 sdks/python/client/argo_workflows/models/container_port.py create mode 100644 
sdks/python/client/argo_workflows/models/create_options.py create mode 100644 sdks/python/client/argo_workflows/models/csi_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/downward_api_projection.py create mode 100644 sdks/python/client/argo_workflows/models/downward_api_volume_file.py create mode 100644 sdks/python/client/argo_workflows/models/downward_api_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/duration.py create mode 100644 sdks/python/client/argo_workflows/models/empty_dir_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/env_from_source.py create mode 100644 sdks/python/client/argo_workflows/models/env_var.py create mode 100644 sdks/python/client/argo_workflows/models/env_var_source.py create mode 100644 sdks/python/client/argo_workflows/models/ephemeral_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/event.py create mode 100644 sdks/python/client/argo_workflows/models/event_series.py create mode 100644 sdks/python/client/argo_workflows/models/event_source.py create mode 100644 sdks/python/client/argo_workflows/models/eventsource_create_event_source_request.py create mode 100644 sdks/python/client/argo_workflows/models/eventsource_event_source_watch_event.py create mode 100644 sdks/python/client/argo_workflows/models/eventsource_log_entry.py create mode 100644 sdks/python/client/argo_workflows/models/eventsource_update_event_source_request.py create mode 100644 sdks/python/client/argo_workflows/models/exec_action.py create mode 100644 sdks/python/client/argo_workflows/models/fc_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/flex_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/flocker_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/gce_persistent_disk_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/git_repo_volume_source.py 
create mode 100644 sdks/python/client/argo_workflows/models/glusterfs_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/google_protobuf_any.py create mode 100644 sdks/python/client/argo_workflows/models/group_version_resource.py create mode 100644 sdks/python/client/argo_workflows/models/grpc_action.py create mode 100644 sdks/python/client/argo_workflows/models/grpc_gateway_runtime_error.py create mode 100644 sdks/python/client/argo_workflows/models/grpc_gateway_runtime_stream_error.py create mode 100644 sdks/python/client/argo_workflows/models/host_alias.py create mode 100644 sdks/python/client/argo_workflows/models/host_path_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/http_get_action.py create mode 100644 sdks/python/client/argo_workflows/models/http_header.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amount.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_consume_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_argo_workflow_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_artifact_location.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_aws_lambda_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_backoff.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_basic_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_calendar_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_catchup_configuration.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_condition.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_by_time.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_criteria.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_config_map_persistence.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_custom_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_data_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_email_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_emitter_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_context.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_dependency.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_dependency_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_dependency_transformer.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_persistence.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_list.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_expr_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_event_source.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_generic_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gerrit_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_creds.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_remote_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_app_creds.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gitlab_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_hdfs_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_http_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_int64_or_string.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_k8_s_resource_policy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_consumer_group.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_log_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_metadata.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_mqtt_event_source.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_events_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nsq_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_open_whisk_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_owned_repositories.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_payload_field.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pub_sub_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_rate_limit.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_stream_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_bucket.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sasl_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_schema_registry_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_secure_header.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_selector.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_list.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_service.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sftp_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_sender.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_thread.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sns_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sqs_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status_policy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_stripe_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_time_filter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_tls_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_policy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_url_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_value_from_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_watch_path_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_context.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_event_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_archive_strategy.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_arguments.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_art_gc_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_location.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_node_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_paths.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_backoff.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_basic_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cache.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_client_cert_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_collect_event_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_column.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_condition.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_node.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_continue_on.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_counter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_list.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_task.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_event.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_executor_config.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gauge.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_get_user_info_response.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_git_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_header.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_histogram.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_body_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header_source.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_info_response.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_inputs.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_keys.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_value_from.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_values.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lifecycle_hook.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_link.py 
create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_log_entry.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_manifest_from.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoization_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoize.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metadata.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metric_label.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metrics.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_mutex.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_mutex_holding.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_mutex_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_node_flag.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_node_result.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_node_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_node_synchronization_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_o_auth2_auth.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_outputs.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parallel_steps.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parameter.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_pod_gc.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_prometheus.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_raw_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resource_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_affinity.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_strategy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_s3_artifact.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_s3_artifact_repository.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_s3_encryption_options.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_script_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_holding.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_ref.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_sequence.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_stop_strategy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit_opts.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_suspend_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_tar_strategy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template_ref.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_transformation_step.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_ttl_strategy.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_user_container.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_value_from.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_version.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_volume_claim_gc.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_create_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_lint_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_list.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_metadata.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resume_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_retry_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_set_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_step.py create mode 100644 
sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_stop_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_submit_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_list.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_ref.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py create mode 100644 sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_watch_event.py create mode 100644 sdks/python/client/argo_workflows/models/io_k8s_api_policy_v1_pod_disruption_budget_spec.py create mode 100644 sdks/python/client/argo_workflows/models/iscsi_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/key_to_path.py create mode 100644 sdks/python/client/argo_workflows/models/label_selector.py create mode 100644 sdks/python/client/argo_workflows/models/label_selector_requirement.py create mode 100644 
sdks/python/client/argo_workflows/models/lifecycle.py create mode 100644 sdks/python/client/argo_workflows/models/lifecycle_handler.py create mode 100644 sdks/python/client/argo_workflows/models/list_meta.py create mode 100644 sdks/python/client/argo_workflows/models/local_object_reference.py create mode 100644 sdks/python/client/argo_workflows/models/managed_fields_entry.py create mode 100644 sdks/python/client/argo_workflows/models/nfs_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/node_affinity.py create mode 100644 sdks/python/client/argo_workflows/models/node_selector.py create mode 100644 sdks/python/client/argo_workflows/models/node_selector_requirement.py create mode 100644 sdks/python/client/argo_workflows/models/node_selector_term.py create mode 100644 sdks/python/client/argo_workflows/models/object_field_selector.py create mode 100644 sdks/python/client/argo_workflows/models/object_meta.py create mode 100644 sdks/python/client/argo_workflows/models/object_reference.py create mode 100644 sdks/python/client/argo_workflows/models/owner_reference.py create mode 100644 sdks/python/client/argo_workflows/models/persistent_volume_claim.py create mode 100644 sdks/python/client/argo_workflows/models/persistent_volume_claim_condition.py create mode 100644 sdks/python/client/argo_workflows/models/persistent_volume_claim_spec.py create mode 100644 sdks/python/client/argo_workflows/models/persistent_volume_claim_status.py create mode 100644 sdks/python/client/argo_workflows/models/persistent_volume_claim_template.py create mode 100644 sdks/python/client/argo_workflows/models/persistent_volume_claim_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/photon_persistent_disk_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/pod_affinity.py create mode 100644 sdks/python/client/argo_workflows/models/pod_affinity_term.py create mode 100644 
sdks/python/client/argo_workflows/models/pod_anti_affinity.py create mode 100644 sdks/python/client/argo_workflows/models/pod_dns_config.py create mode 100644 sdks/python/client/argo_workflows/models/pod_dns_config_option.py create mode 100644 sdks/python/client/argo_workflows/models/pod_security_context.py create mode 100644 sdks/python/client/argo_workflows/models/portworx_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/preferred_scheduling_term.py create mode 100644 sdks/python/client/argo_workflows/models/probe.py create mode 100644 sdks/python/client/argo_workflows/models/projected_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/quobyte_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/rbd_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/resource_field_selector.py create mode 100644 sdks/python/client/argo_workflows/models/resource_requirements.py create mode 100644 sdks/python/client/argo_workflows/models/scale_io_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/se_linux_options.py create mode 100644 sdks/python/client/argo_workflows/models/seccomp_profile.py create mode 100644 sdks/python/client/argo_workflows/models/secret_env_source.py create mode 100644 sdks/python/client/argo_workflows/models/secret_key_selector.py create mode 100644 sdks/python/client/argo_workflows/models/secret_projection.py create mode 100644 sdks/python/client/argo_workflows/models/secret_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/security_context.py create mode 100644 sdks/python/client/argo_workflows/models/sensor_create_sensor_request.py create mode 100644 sdks/python/client/argo_workflows/models/sensor_log_entry.py create mode 100644 sdks/python/client/argo_workflows/models/sensor_sensor_watch_event.py create mode 100644 sdks/python/client/argo_workflows/models/sensor_update_sensor_request.py create mode 
100644 sdks/python/client/argo_workflows/models/service_account_token_projection.py create mode 100644 sdks/python/client/argo_workflows/models/service_port.py create mode 100644 sdks/python/client/argo_workflows/models/status_cause.py create mode 100644 sdks/python/client/argo_workflows/models/storage_os_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/stream_result_of_event.py create mode 100644 sdks/python/client/argo_workflows/models/stream_result_of_eventsource_event_source_watch_event.py create mode 100644 sdks/python/client/argo_workflows/models/stream_result_of_eventsource_log_entry.py create mode 100644 sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py create mode 100644 sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py create mode 100644 sdks/python/client/argo_workflows/models/stream_result_of_sensor_log_entry.py create mode 100644 sdks/python/client/argo_workflows/models/stream_result_of_sensor_sensor_watch_event.py create mode 100644 sdks/python/client/argo_workflows/models/sysctl.py create mode 100644 sdks/python/client/argo_workflows/models/tcp_socket_action.py create mode 100644 sdks/python/client/argo_workflows/models/toleration.py create mode 100644 sdks/python/client/argo_workflows/models/typed_local_object_reference.py create mode 100644 sdks/python/client/argo_workflows/models/volume.py create mode 100644 sdks/python/client/argo_workflows/models/volume_device.py create mode 100644 sdks/python/client/argo_workflows/models/volume_mount.py create mode 100644 sdks/python/client/argo_workflows/models/volume_projection.py create mode 100644 sdks/python/client/argo_workflows/models/vsphere_virtual_disk_volume_source.py create mode 100644 sdks/python/client/argo_workflows/models/weighted_pod_affinity_term.py create mode 100644 sdks/python/client/argo_workflows/models/windows_security_context_options.py create mode 
100644 sdks/python/client/argo_workflows/py.typed diff --git a/sdks/python/Makefile b/sdks/python/Makefile index a9b71d2df457..7a06d78e7bf7 100755 --- a/sdks/python/Makefile +++ b/sdks/python/Makefile @@ -30,7 +30,7 @@ else sed 's/io.k8s.apimachinery.pkg.apis.meta.v1.//' \ > $(WD)/swagger.json cp ../../LICENSE $(WD)/LICENSE - $(DOCKER) openapitools/openapi-generator-cli:v5.4.0 \ + $(DOCKER) openapitools/openapi-generator-cli:v7.4.0 \ generate \ --input-spec /wd/swagger.json \ --generator-name python \ diff --git a/sdks/python/client/argo_workflows/__init__.py b/sdks/python/client/argo_workflows/__init__.py index c333a3889136..bffee5b3c2c2 100644 --- a/sdks/python/client/argo_workflows/__init__.py +++ b/sdks/python/client/argo_workflows/__init__.py @@ -1,27 +1,436 @@ +# coding: utf-8 + # flake8: noqa """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 __version__ = "0.0.0-pre" +# import apis into sdk package +from argo_workflows.api.archived_workflow_service_api import ArchivedWorkflowServiceApi +from argo_workflows.api.artifact_service_api import ArtifactServiceApi +from argo_workflows.api.cluster_workflow_template_service_api import ClusterWorkflowTemplateServiceApi +from argo_workflows.api.cron_workflow_service_api import CronWorkflowServiceApi +from argo_workflows.api.event_service_api import EventServiceApi +from argo_workflows.api.event_source_service_api import EventSourceServiceApi +from argo_workflows.api.info_service_api import InfoServiceApi +from argo_workflows.api.sensor_service_api import SensorServiceApi +from argo_workflows.api.workflow_service_api import WorkflowServiceApi +from argo_workflows.api.workflow_template_service_api import WorkflowTemplateServiceApi + # import ApiClient +from argo_workflows.api_response import ApiResponse from argo_workflows.api_client import ApiClient - -# import Configuration from argo_workflows.configuration import Configuration - -# import exceptions from argo_workflows.exceptions import OpenApiException -from argo_workflows.exceptions import ApiAttributeError from argo_workflows.exceptions import ApiTypeError from argo_workflows.exceptions import ApiValueError from argo_workflows.exceptions import ApiKeyError +from argo_workflows.exceptions import ApiAttributeError from argo_workflows.exceptions import ApiException + +# import models into sdk package +from argo_workflows.models.aws_elastic_block_store_volume_source import AWSElasticBlockStoreVolumeSource +from argo_workflows.models.affinity import Affinity +from argo_workflows.models.azure_disk_volume_source import AzureDiskVolumeSource +from argo_workflows.models.azure_file_volume_source import AzureFileVolumeSource +from argo_workflows.models.csi_volume_source import CSIVolumeSource +from argo_workflows.models.capabilities import Capabilities +from argo_workflows.models.ceph_fs_volume_source 
import CephFSVolumeSource +from argo_workflows.models.cinder_volume_source import CinderVolumeSource +from argo_workflows.models.config_map_env_source import ConfigMapEnvSource +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from argo_workflows.models.config_map_projection import ConfigMapProjection +from argo_workflows.models.config_map_volume_source import ConfigMapVolumeSource +from argo_workflows.models.container import Container +from argo_workflows.models.container_port import ContainerPort +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.downward_api_projection import DownwardAPIProjection +from argo_workflows.models.downward_api_volume_file import DownwardAPIVolumeFile +from argo_workflows.models.downward_api_volume_source import DownwardAPIVolumeSource +from argo_workflows.models.duration import Duration +from argo_workflows.models.empty_dir_volume_source import EmptyDirVolumeSource +from argo_workflows.models.env_from_source import EnvFromSource +from argo_workflows.models.env_var import EnvVar +from argo_workflows.models.env_var_source import EnvVarSource +from argo_workflows.models.ephemeral_volume_source import EphemeralVolumeSource +from argo_workflows.models.event import Event +from argo_workflows.models.event_series import EventSeries +from argo_workflows.models.event_source import EventSource +from argo_workflows.models.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest +from argo_workflows.models.eventsource_event_source_watch_event import EventsourceEventSourceWatchEvent +from argo_workflows.models.eventsource_log_entry import EventsourceLogEntry +from argo_workflows.models.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest +from argo_workflows.models.exec_action import ExecAction +from argo_workflows.models.fc_volume_source import FCVolumeSource +from argo_workflows.models.flex_volume_source import 
FlexVolumeSource +from argo_workflows.models.flocker_volume_source import FlockerVolumeSource +from argo_workflows.models.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource +from argo_workflows.models.grpc_action import GRPCAction +from argo_workflows.models.git_repo_volume_source import GitRepoVolumeSource +from argo_workflows.models.glusterfs_volume_source import GlusterfsVolumeSource +from argo_workflows.models.google_protobuf_any import GoogleProtobufAny +from argo_workflows.models.group_version_resource import GroupVersionResource +from argo_workflows.models.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from argo_workflows.models.http_get_action import HTTPGetAction +from argo_workflows.models.http_header import HTTPHeader +from argo_workflows.models.host_alias import HostAlias +from argo_workflows.models.host_path_volume_source import HostPathVolumeSource +from argo_workflows.models.iscsi_volume_source import ISCSIVolumeSource +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_consume_config import IoArgoprojEventsV1alpha1AMQPConsumeConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_bind_config import IoArgoprojEventsV1alpha1AMQPQueueBindConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount +from 
argo_workflows.models.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_events_hub_event_source import IoArgoprojEventsV1alpha1AzureEventsHubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_event_source import IoArgoprojEventsV1alpha1BitbucketEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository +from 
argo_workflows.models.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_catchup_configuration import IoArgoprojEventsV1alpha1CatchupConfiguration +from argo_workflows.models.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria +from argo_workflows.models.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence +from argo_workflows.models.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_data_filter import IoArgoprojEventsV1alpha1DataFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_emitter_event_source import IoArgoprojEventsV1alpha1EmitterEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_event_context import IoArgoprojEventsV1alpha1EventContext +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_filter import IoArgoprojEventsV1alpha1EventDependencyFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer +from argo_workflows.models.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from 
argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus +from argo_workflows.models.io_argoproj_events_v1alpha1_expr_filter import IoArgoprojEventsV1alpha1ExprFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_file_artifact import IoArgoprojEventsV1alpha1FileArtifact +from argo_workflows.models.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_git_artifact import IoArgoprojEventsV1alpha1GitArtifact +from argo_workflows.models.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds +from argo_workflows.models.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds +from argo_workflows.models.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_http_trigger import 
IoArgoprojEventsV1alpha1HTTPTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString +from argo_workflows.models.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_event_source import IoArgoprojEventsV1alpha1KafkaEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories +from argo_workflows.models.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField +from argo_workflows.models.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource +from 
argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_stream_event_source import IoArgoprojEventsV1alpha1RedisStreamEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource import IoArgoprojEventsV1alpha1Resource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter +from argo_workflows.models.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader +from 
argo_workflows.models.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus +from argo_workflows.models.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_standard_k8_s_trigger import IoArgoprojEventsV1alpha1StandardK8STrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status +from argo_workflows.models.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_filter import IoArgoprojEventsV1alpha1StorageGridFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_template 
import IoArgoprojEventsV1alpha1Template +from argo_workflows.models.io_argoproj_events_v1alpha1_time_filter import IoArgoprojEventsV1alpha1TimeFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate +from argo_workflows.models.io_argoproj_events_v1alpha1_url_artifact import IoArgoprojEventsV1alpha1URLArtifact +from argo_workflows.models.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource +from argo_workflows.models.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.models.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc_spec 
import IoArgoprojWorkflowV1alpha1ArtifactGCSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc_status import IoArgoprojWorkflowV1alpha1ArtifactGCStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_paths import IoArgoprojWorkflowV1alpha1ArtifactPaths +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result_node_status import IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_backoff import IoArgoprojWorkflowV1alpha1Backoff +from argo_workflows.models.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth 
+from argo_workflows.models.io_argoproj_workflow_v1alpha1_cache import IoArgoprojWorkflowV1alpha1Cache +from argo_workflows.models.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_column import IoArgoprojWorkflowV1alpha1Column +from argo_workflows.models.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_node import IoArgoprojWorkflowV1alpha1ContainerNode +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_retry_strategy import IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_template import IoArgoprojWorkflowV1alpha1ContainerSetTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn +from argo_workflows.models.io_argoproj_workflow_v1alpha1_counter import IoArgoprojWorkflowV1alpha1Counter +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_create_cron_workflow_request import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_list import IoArgoprojWorkflowV1alpha1CronWorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_spec import IoArgoprojWorkflowV1alpha1CronWorkflowSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_status import IoArgoprojWorkflowV1alpha1CronWorkflowStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_task import IoArgoprojWorkflowV1alpha1DAGTask +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_template import IoArgoprojWorkflowV1alpha1DAGTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data import IoArgoprojWorkflowV1alpha1Data +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data_source import IoArgoprojWorkflowV1alpha1DataSource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_event import IoArgoprojWorkflowV1alpha1Event +from argo_workflows.models.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact_repository import IoArgoprojWorkflowV1alpha1GCSArtifactRepository +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_gauge import IoArgoprojWorkflowV1alpha1Gauge +from argo_workflows.models.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http import IoArgoprojWorkflowV1alpha1HTTP +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header import IoArgoprojWorkflowV1alpha1HTTPHeader +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header_source import IoArgoprojWorkflowV1alpha1HTTPHeaderSource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header +from argo_workflows.models.io_argoproj_workflow_v1alpha1_histogram import IoArgoprojWorkflowV1alpha1Histogram +from argo_workflows.models.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_inputs import IoArgoprojWorkflowV1alpha1Inputs +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_value_from import IoArgoprojWorkflowV1alpha1LabelValueFrom +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook +from argo_workflows.models.io_argoproj_workflow_v1alpha1_link import IoArgoprojWorkflowV1alpha1Link +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_log_entry import IoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.models.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom +from argo_workflows.models.io_argoproj_workflow_v1alpha1_memoization_status import IoArgoprojWorkflowV1alpha1MemoizationStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_memoize import IoArgoprojWorkflowV1alpha1Memoize +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metric_label import IoArgoprojWorkflowV1alpha1MetricLabel +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metrics import IoArgoprojWorkflowV1alpha1Metrics +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex import IoArgoprojWorkflowV1alpha1Mutex +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex_holding import IoArgoprojWorkflowV1alpha1MutexHolding +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex_status import IoArgoprojWorkflowV1alpha1MutexStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_flag import IoArgoprojWorkflowV1alpha1NodeFlag +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_result import IoArgoprojWorkflowV1alpha1NodeResult +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_node_synchronization_status import IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule +from argo_workflows.models.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parallel_steps import IoArgoprojWorkflowV1alpha1ParallelSteps +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter +from argo_workflows.models.io_argoproj_workflow_v1alpha1_pod_gc import IoArgoprojWorkflowV1alpha1PodGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_prometheus import IoArgoprojWorkflowV1alpha1Prometheus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resource_template import IoArgoprojWorkflowV1alpha1ResourceTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_affinity import IoArgoprojWorkflowV1alpha1RetryAffinity +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest 
+from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact_repository import IoArgoprojWorkflowV1alpha1S3ArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_script_template import IoArgoprojWorkflowV1alpha1ScriptTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_holding import IoArgoprojWorkflowV1alpha1SemaphoreHolding +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_ref import IoArgoprojWorkflowV1alpha1SemaphoreRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_status import IoArgoprojWorkflowV1alpha1SemaphoreStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence +from argo_workflows.models.io_argoproj_workflow_v1alpha1_stop_strategy import IoArgoprojWorkflowV1alpha1StopStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit import IoArgoprojWorkflowV1alpha1Submit +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit_opts import IoArgoprojWorkflowV1alpha1SubmitOpts +from argo_workflows.models.io_argoproj_workflow_v1alpha1_suspend_template import IoArgoprojWorkflowV1alpha1SuspendTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization_status import IoArgoprojWorkflowV1alpha1SynchronizationStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_ttl_strategy import IoArgoprojWorkflowV1alpha1TTLStrategy +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_tar_strategy import IoArgoprojWorkflowV1alpha1TarStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_transformation_step import IoArgoprojWorkflowV1alpha1TransformationStep +from argo_workflows.models.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_user_container import IoArgoprojWorkflowV1alpha1UserContainer +from argo_workflows.models.io_argoproj_workflow_v1alpha1_value_from import IoArgoprojWorkflowV1alpha1ValueFrom +from argo_workflows.models.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version +from argo_workflows.models.io_argoproj_workflow_v1alpha1_volume_claim_gc import IoArgoprojWorkflowV1alpha1VolumeClaimGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding import IoArgoprojWorkflowV1alpha1WorkflowEventBinding +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_spec import IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc import IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_lint_request import 
IoArgoprojWorkflowV1alpha1WorkflowLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_metadata import IoArgoprojWorkflowV1alpha1WorkflowMetadata +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_status import IoArgoprojWorkflowV1alpha1WorkflowStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_step import IoArgoprojWorkflowV1alpha1WorkflowStep +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_stop_request import IoArgoprojWorkflowV1alpha1WorkflowStopRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_task_set_spec import IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_task_set_status import IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate 
+from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_list import IoArgoprojWorkflowV1alpha1WorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_watch_event import IoArgoprojWorkflowV1alpha1WorkflowWatchEvent +from argo_workflows.models.io_k8s_api_policy_v1_pod_disruption_budget_spec import IoK8sApiPolicyV1PodDisruptionBudgetSpec +from argo_workflows.models.key_to_path import KeyToPath +from argo_workflows.models.label_selector import LabelSelector +from argo_workflows.models.label_selector_requirement import LabelSelectorRequirement +from argo_workflows.models.lifecycle import Lifecycle +from argo_workflows.models.lifecycle_handler import LifecycleHandler +from argo_workflows.models.list_meta import ListMeta +from argo_workflows.models.local_object_reference import LocalObjectReference +from argo_workflows.models.managed_fields_entry import ManagedFieldsEntry +from argo_workflows.models.nfs_volume_source import NFSVolumeSource +from argo_workflows.models.node_affinity import NodeAffinity +from argo_workflows.models.node_selector import NodeSelector +from argo_workflows.models.node_selector_requirement import NodeSelectorRequirement +from argo_workflows.models.node_selector_term import 
NodeSelectorTerm +from argo_workflows.models.object_field_selector import ObjectFieldSelector +from argo_workflows.models.object_meta import ObjectMeta +from argo_workflows.models.object_reference import ObjectReference +from argo_workflows.models.owner_reference import OwnerReference +from argo_workflows.models.persistent_volume_claim import PersistentVolumeClaim +from argo_workflows.models.persistent_volume_claim_condition import PersistentVolumeClaimCondition +from argo_workflows.models.persistent_volume_claim_spec import PersistentVolumeClaimSpec +from argo_workflows.models.persistent_volume_claim_status import PersistentVolumeClaimStatus +from argo_workflows.models.persistent_volume_claim_template import PersistentVolumeClaimTemplate +from argo_workflows.models.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource +from argo_workflows.models.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource +from argo_workflows.models.pod_affinity import PodAffinity +from argo_workflows.models.pod_affinity_term import PodAffinityTerm +from argo_workflows.models.pod_anti_affinity import PodAntiAffinity +from argo_workflows.models.pod_dns_config import PodDNSConfig +from argo_workflows.models.pod_dns_config_option import PodDNSConfigOption +from argo_workflows.models.pod_security_context import PodSecurityContext +from argo_workflows.models.portworx_volume_source import PortworxVolumeSource +from argo_workflows.models.preferred_scheduling_term import PreferredSchedulingTerm +from argo_workflows.models.probe import Probe +from argo_workflows.models.projected_volume_source import ProjectedVolumeSource +from argo_workflows.models.quobyte_volume_source import QuobyteVolumeSource +from argo_workflows.models.rbd_volume_source import RBDVolumeSource +from argo_workflows.models.resource_field_selector import ResourceFieldSelector +from argo_workflows.models.resource_requirements import ResourceRequirements +from 
argo_workflows.models.se_linux_options import SELinuxOptions +from argo_workflows.models.scale_io_volume_source import ScaleIOVolumeSource +from argo_workflows.models.seccomp_profile import SeccompProfile +from argo_workflows.models.secret_env_source import SecretEnvSource +from argo_workflows.models.secret_key_selector import SecretKeySelector +from argo_workflows.models.secret_projection import SecretProjection +from argo_workflows.models.secret_volume_source import SecretVolumeSource +from argo_workflows.models.security_context import SecurityContext +from argo_workflows.models.sensor_create_sensor_request import SensorCreateSensorRequest +from argo_workflows.models.sensor_log_entry import SensorLogEntry +from argo_workflows.models.sensor_sensor_watch_event import SensorSensorWatchEvent +from argo_workflows.models.sensor_update_sensor_request import SensorUpdateSensorRequest +from argo_workflows.models.service_account_token_projection import ServiceAccountTokenProjection +from argo_workflows.models.service_port import ServicePort +from argo_workflows.models.status_cause import StatusCause +from argo_workflows.models.storage_os_volume_source import StorageOSVolumeSource +from argo_workflows.models.stream_result_of_event import StreamResultOfEvent +from argo_workflows.models.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent +from argo_workflows.models.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent +from argo_workflows.models.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry +from argo_workflows.models.stream_result_of_sensor_sensor_watch_event import 
StreamResultOfSensorSensorWatchEvent +from argo_workflows.models.sysctl import Sysctl +from argo_workflows.models.tcp_socket_action import TCPSocketAction +from argo_workflows.models.toleration import Toleration +from argo_workflows.models.typed_local_object_reference import TypedLocalObjectReference +from argo_workflows.models.volume import Volume +from argo_workflows.models.volume_device import VolumeDevice +from argo_workflows.models.volume_mount import VolumeMount +from argo_workflows.models.volume_projection import VolumeProjection +from argo_workflows.models.vsphere_virtual_disk_volume_source import VsphereVirtualDiskVolumeSource +from argo_workflows.models.weighted_pod_affinity_term import WeightedPodAffinityTerm +from argo_workflows.models.windows_security_context_options import WindowsSecurityContextOptions diff --git a/sdks/python/client/argo_workflows/api/__init__.py b/sdks/python/client/argo_workflows/api/__init__.py index 2bd817d4ac7b..894890284a3d 100644 --- a/sdks/python/client/argo_workflows/api/__init__.py +++ b/sdks/python/client/argo_workflows/api/__init__.py @@ -1,3 +1,14 @@ -# do not import all apis into this module because that uses a lot of memory and stack frames -# if you need the ability to import all apis from one package, import them with -# from argo_workflows.apis import ArchivedWorkflowServiceApi +# flake8: noqa + +# import apis into api package +from argo_workflows.api.archived_workflow_service_api import ArchivedWorkflowServiceApi +from argo_workflows.api.artifact_service_api import ArtifactServiceApi +from argo_workflows.api.cluster_workflow_template_service_api import ClusterWorkflowTemplateServiceApi +from argo_workflows.api.cron_workflow_service_api import CronWorkflowServiceApi +from argo_workflows.api.event_service_api import EventServiceApi +from argo_workflows.api.event_source_service_api import EventSourceServiceApi +from argo_workflows.api.info_service_api import InfoServiceApi +from argo_workflows.api.sensor_service_api 
import SensorServiceApi +from argo_workflows.api.workflow_service_api import WorkflowServiceApi +from argo_workflows.api.workflow_template_service_api import WorkflowTemplateServiceApi + diff --git a/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py b/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py index 7f63506edef9..83ed0e06bc22 100644 --- a/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/archived_workflow_service_api.py @@ -1,1076 +1,2265 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated +from pydantic import Field, StrictBool, StrictStr +from typing import Optional +from typing_extensions import Annotated +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys -from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues -from argo_workflows.model.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import 
IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList - - -class ArchivedWorkflowServiceApi(object): +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType + + +class ArchivedWorkflowServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.delete_archived_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/archived-workflows/{uid}', - 'operation_id': 'delete_archived_workflow', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'uid', - 'namespace', - ], - 'required': [ - 'uid', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'uid': - (str,), - 'namespace': - (str,), - }, - 'attribute_map': { - 'uid': 'uid', - 'namespace': 'namespace', - }, - 'location_map': { - 'uid': 'path', - 'namespace': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + + @validate_call + def delete_archived_workflow( + self, + uid: StrictStr, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + 
Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """delete_archived_workflow + + + :param uid: (required) + :type uid: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_archived_workflow_serialize( + uid=uid, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.get_archived_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/archived-workflows/{uid}', - 'operation_id': 'get_archived_workflow', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'uid', - 'namespace', - 'name', - ], - 'required': [ - 'uid', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'uid': - (str,), - 'namespace': - (str,), - 'name': - (str,), - }, - 'attribute_map': { - 'uid': 'uid', - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'uid': 'path', - 'namespace': 'query', - 'name': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - self.list_archived_workflow_label_keys_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1LabelKeys,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/archived-workflows-label-keys', - 'operation_id': 'list_archived_workflow_label_keys', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'query', - }, 
- 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_archived_workflow_with_http_info( + self, + uid: StrictStr, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """delete_archived_workflow + + + :param uid: (required) + :type uid: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_archived_workflow_serialize( + uid=uid, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.list_archived_workflow_label_values_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1LabelValues,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/archived-workflows-label-values', - 'operation_id': 'list_archived_workflow_label_values', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - 'namespace', - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - 'namespace': - (str,), - }, - 'attribute_map': { - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 
'list_options_continue': 'listOptions.continue', - 'namespace': 'namespace', - }, - 'location_map': { - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - 'namespace': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - self.list_archived_workflows_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/archived-workflows', - 'operation_id': 'list_archived_workflows', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - 'name_prefix', - 'namespace', - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 
'list_options_continue': - (str,), - 'name_prefix': - (str,), - 'namespace': - (str,), - }, - 'attribute_map': { - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - 'name_prefix': 'namePrefix', - 'namespace': 'namespace', - }, - 'location_map': { - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - 'name_prefix': 'query', - 'namespace': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - self.resubmit_archived_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/archived-workflows/{uid}/resubmit', - 'operation_id': 'resubmit_archived_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'uid', - 'body', - ], - 'required': [ - 'uid', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, 
- 'openapi_types': { - 'uid': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest,), - }, - 'attribute_map': { - 'uid': 'uid', - }, - 'location_map': { - 'uid': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client + + + @validate_call + def delete_archived_workflow_without_preload_content( + self, + uid: StrictStr, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete_archived_workflow + + + :param uid: (required) + :type uid: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_archived_workflow_serialize( + uid=uid, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.retry_archived_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/archived-workflows/{uid}/retry', - 'operation_id': 'retry_archived_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'uid', - 'body', - ], - 'required': [ - 'uid', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'uid': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest,), - }, - 'attribute_map': { - 'uid': 'uid', - }, - 'location_map': { - 'uid': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - def delete_archived_workflow( + + def _delete_archived_workflow_serialize( self, uid, - **kwargs - ): - """delete_archived_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_archived_workflow(uid, async_req=True) - >>> result = thread.get() - - Args: - uid (str): - - Keyword Args: - namespace (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + namespace, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if uid is not None: + _path_params['uid'] = uid + # process the query parameters + if namespace is not None: + + _query_params.append(('namespace', namespace)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/api/v1/archived-workflows/{uid}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def get_archived_workflow( + self, + uid: StrictStr, + namespace: Optional[StrictStr] = None, + name: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: 
Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """get_archived_workflow + + + :param uid: (required) + :type uid: str + :param namespace: + :type namespace: str + :param name: + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_archived_workflow_serialize( + uid=uid, + namespace=namespace, + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_archived_workflow_with_http_info( + self, + uid: StrictStr, + namespace: Optional[StrictStr] = None, + name: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """get_archived_workflow + + + :param uid: (required) + :type uid: str + :param namespace: + :type namespace: str + :param name: + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_archived_workflow_serialize( + uid=uid, + namespace=namespace, + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['uid'] = \ - uid - return self.delete_archived_workflow_endpoint.call_with_http_info(**kwargs) - def get_archived_workflow( + + @validate_call + def get_archived_workflow_without_preload_content( self, - uid, - **kwargs - ): - """get_archived_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_archived_workflow(uid, async_req=True) - >>> result = thread.get() - - Args: - uid (str): - - Keyword Args: - namespace (str): [optional] - name (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + uid: StrictStr, + namespace: Optional[StrictStr] = None, + name: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_archived_workflow + + + :param uid: (required) + :type uid: str + :param namespace: + :type namespace: str + :param name: + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_archived_workflow_serialize( + uid=uid, + namespace=namespace, + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + return response_data.response + + + def _get_archived_workflow_serialize( + self, + uid, + namespace, + name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if uid is not None: + _path_params['uid'] = uid + # process the query parameters + if namespace is not None: + + _query_params.append(('namespace', namespace)) + + if name is not None: + + _query_params.append(('name', name)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/archived-workflows/{uid}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + 
files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + + + + @validate_call + def list_archived_workflow_label_keys( + self, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1LabelKeys: + """list_archived_workflow_label_keys + + + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_archived_workflow_label_keys_serialize( + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1LabelKeys", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['uid'] = \ - uid - return self.get_archived_workflow_endpoint.call_with_http_info(**kwargs) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data - def list_archived_workflow_label_keys( + + @validate_call + def list_archived_workflow_label_keys_with_http_info( self, - **kwargs - ): - """list_archived_workflow_label_keys # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_archived_workflow_label_keys(async_req=True) - >>> result = thread.get() - - - Keyword Args: - namespace (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. 
- _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1LabelKeys - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1LabelKeys]: + """list_archived_workflow_label_keys + + + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_archived_workflow_label_keys_serialize( + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1LabelKeys", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + + @validate_call + def list_archived_workflow_label_keys_without_preload_content( + self, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_archived_workflow_label_keys + + + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_archived_workflow_label_keys_serialize( + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1LabelKeys", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + return response_data.response + + + def _list_archived_workflow_label_keys_serialize( + self, + namespace, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if namespace is not None: + + 
_query_params.append(('namespace', namespace)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/archived-workflows-label-keys', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - return self.list_archived_workflow_label_keys_endpoint.call_with_http_info(**kwargs) + + + + @validate_call def list_archived_workflow_label_values( self, - **kwargs - ): - """list_archived_workflow_label_values # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_archived_workflow_label_values(async_req=True) - >>> result = thread.get() - - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. 
[optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. 
This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. 
[optional] - namespace (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1LabelValues - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. 
+optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1LabelValues: + """list_archived_workflow_label_values + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_archived_workflow_label_values_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1LabelValues", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_archived_workflow_label_values_with_http_info( + self, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1LabelValues]: + """list_archived_workflow_label_values + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_archived_workflow_label_values_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1LabelValues", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + + @validate_call + def list_archived_workflow_label_values_without_preload_content( + self, + list_options_label_selector: 
Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
+optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_archived_workflow_label_values + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. 
+ :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_archived_workflow_label_values_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1LabelValues", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + return response_data.response + + + def _list_archived_workflow_label_values_serialize( + self, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + namespace, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if list_options_label_selector is not None: + + 
_query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + if namespace is not None: + + _query_params.append(('namespace', namespace)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/archived-workflows-label-values', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth 
) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - return self.list_archived_workflow_label_values_endpoint.call_with_http_info(**kwargs) + + + + @validate_call def list_archived_workflows( self, - **kwargs - ): - """list_archived_workflows # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_archived_workflows(async_req=True) - >>> result = thread.get() - - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. 
It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - name_prefix (str): [optional] - namespace (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowList - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + name_prefix: Optional[StrictStr] = None, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowList: + """list_archived_workflows + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param name_prefix: + :type name_prefix: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_archived_workflows_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + name_prefix=name_prefix, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_archived_workflows_with_http_info( + self, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned 
objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
+optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + name_prefix: Optional[StrictStr] = None, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowList]: + """list_archived_workflows + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. 
Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param name_prefix: + :type name_prefix: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_archived_workflows_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + name_prefix=name_prefix, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + + @validate_call + def list_archived_workflows_without_preload_content( + self, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. 
Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + name_prefix: Optional[StrictStr] = None, + namespace: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_archived_workflows + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param name_prefix: + :type name_prefix: str + :param namespace: + :type namespace: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_archived_workflows_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + name_prefix=name_prefix, + namespace=namespace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_archived_workflows_serialize( + self, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + name_prefix, + namespace, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + 
+ _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + if name_prefix is not None: + + _query_params.append(('namePrefix', name_prefix)) + + if namespace is not None: + + _query_params.append(('namespace', namespace)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/archived-workflows', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_content_type'] = kwargs.get( - 
'_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - return self.list_archived_workflows_endpoint.call_with_http_info(**kwargs) + + + + @validate_call def resubmit_archived_workflow( self, - uid, - body, - **kwargs - ): - """resubmit_archived_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.resubmit_archived_workflow(uid, body, async_req=True) - >>> result = thread.get() - - Args: - uid (str): - body (IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. 
- async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + uid: StrictStr, + body: IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """resubmit_archived_workflow + + + :param uid: (required) + :type uid: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._resubmit_archived_workflow_serialize( + uid=uid, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def resubmit_archived_workflow_with_http_info( + self, + uid: StrictStr, + body: IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """resubmit_archived_workflow + + + :param uid: (required) + :type uid: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resubmit_archived_workflow_serialize( + uid=uid, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + + @validate_call + def resubmit_archived_workflow_without_preload_content( + self, + uid: StrictStr, + body: IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """resubmit_archived_workflow + + + :param uid: (required) + :type uid: str + :param body: (required) + 
:type body: IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resubmit_archived_workflow_serialize( + uid=uid, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['uid'] = \ - uid - kwargs['body'] = \ - body - return self.resubmit_archived_workflow_endpoint.call_with_http_info(**kwargs) + return response_data.response - def retry_archived_workflow( + + def _resubmit_archived_workflow_serialize( self, uid, body, - **kwargs - ): - """retry_archived_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.retry_archived_workflow(uid, body, async_req=True) - >>> result = thread.get() - - Args: - uid (str): - body (IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if uid is not None: + _path_params['uid'] = uid + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/archived-workflows/{uid}/resubmit', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def retry_archived_workflow( + self, + uid: StrictStr, + body: IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest, + _request_timeout: Union[ + None, 
+ Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """retry_archived_workflow + + + :param uid: (required) + :type uid: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._retry_archived_workflow_serialize( + uid=uid, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def retry_archived_workflow_with_http_info( + self, + uid: StrictStr, + body: IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """retry_archived_workflow + + + :param uid: (required) + :type uid: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._retry_archived_workflow_serialize( + uid=uid, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def retry_archived_workflow_without_preload_content( + self, + uid: StrictStr, + body: IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """retry_archived_workflow + + + :param uid: (required) + :type uid: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._retry_archived_workflow_serialize( + uid=uid, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + return response_data.response + + + def _retry_archived_workflow_serialize( + self, + uid, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if uid is not None: + _path_params['uid'] = uid + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + 
_body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/archived-workflows/{uid}/retry', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['uid'] = \ - uid - kwargs['body'] = \ - body - return self.retry_archived_workflow_endpoint.call_with_http_info(**kwargs) + diff --git a/sdks/python/client/argo_workflows/api/artifact_service_api.py b/sdks/python/client/argo_workflows/api/artifact_service_api.py index 29b2d6294b18..13277f6aba70 100644 --- a/sdks/python/client/argo_workflows/api/artifact_service_api.py +++ b/sdks/python/client/argo_workflows/api/artifact_service_api.py @@ -1,839 +1,1536 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated -import re # noqa: F401 -import sys # noqa: F401 +from pydantic import StrictStr, field_validator -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType -class ArtifactServiceApi(object): +class ArtifactServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. 
""" - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.get_artifact_file_endpoint = _Endpoint( - settings={ - 'response_type': (file_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/artifact-files/{namespace}/{idDiscriminator}/{id}/{nodeId}/{artifactDiscriminator}/{artifactName}', - 'operation_id': 'get_artifact_file', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'id_discriminator', - 'id', - 'node_id', - 'artifact_name', - 'artifact_discriminator', - ], - 'required': [ - 'namespace', - 'id_discriminator', - 'id', - 'node_id', - 'artifact_name', - 'artifact_discriminator', - ], - 'nullable': [ - ], - 'enum': [ - 'id_discriminator', - 'artifact_discriminator', - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - ('id_discriminator',): { - - "WORKFLOW": "workflow", - "ARCHIVED-WORKFLOWS_": "archived-workflows " - }, - ('artifact_discriminator',): { - - "OUTPUTS": "outputs" - }, - }, - 'openapi_types': { - 'namespace': - (str,), - 'id_discriminator': - (str,), - 'id': - (str,), - 'node_id': - (str,), - 'artifact_name': - (str,), - 'artifact_discriminator': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'id_discriminator': 'idDiscriminator', - 'id': 'id', - 'node_id': 'nodeId', - 'artifact_name': 'artifactName', - 'artifact_discriminator': 'artifactDiscriminator', - }, - 'location_map': { - 'namespace': 'path', - 'id_discriminator': 'path', - 'id': 'path', - 'node_id': 'path', - 'artifact_name': 'path', - 'artifact_discriminator': 'path', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.get_input_artifact_endpoint = _Endpoint( - settings={ - 'response_type': (file_type,), - 'auth': [ - 
'BearerToken' - ], - 'endpoint_path': '/input-artifacts/{namespace}/{name}/{nodeId}/{artifactName}', - 'operation_id': 'get_input_artifact', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'node_id', - 'artifact_name', - ], - 'required': [ - 'namespace', - 'name', - 'node_id', - 'artifact_name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'node_id': - (str,), - 'artifact_name': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'node_id': 'nodeId', - 'artifact_name': 'artifactName', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'node_id': 'path', - 'artifact_name': 'path', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.get_input_artifact_by_uid_endpoint = _Endpoint( - settings={ - 'response_type': (file_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/input-artifacts-by-uid/{uid}/{nodeId}/{artifactName}', - 'operation_id': 'get_input_artifact_by_uid', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'uid', - 'node_id', - 'artifact_name', - ], - 'required': [ - 'uid', - 'node_id', - 'artifact_name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'uid': - (str,), - 'node_id': - (str,), - 'artifact_name': - (str,), - }, - 'attribute_map': { - 'uid': 'uid', - 'node_id': 'nodeId', - 'artifact_name': 'artifactName', - }, - 'location_map': { - 'uid': 'path', - 'node_id': 'path', - 'artifact_name': 'path', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client 
- ) - self.get_output_artifact_endpoint = _Endpoint( - settings={ - 'response_type': (file_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/artifacts/{namespace}/{name}/{nodeId}/{artifactName}', - 'operation_id': 'get_output_artifact', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'node_id', - 'artifact_name', - ], - 'required': [ - 'namespace', - 'name', - 'node_id', - 'artifact_name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'node_id': - (str,), - 'artifact_name': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'node_id': 'nodeId', - 'artifact_name': 'artifactName', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'node_id': 'path', - 'artifact_name': 'path', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.get_output_artifact_by_uid_endpoint = _Endpoint( - settings={ - 'response_type': (file_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/artifacts-by-uid/{uid}/{nodeId}/{artifactName}', - 'operation_id': 'get_output_artifact_by_uid', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'uid', - 'node_id', - 'artifact_name', - ], - 'required': [ - 'uid', - 'node_id', - 'artifact_name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'uid': - (str,), - 'node_id': - (str,), - 'artifact_name': - (str,), - }, - 'attribute_map': { - 'uid': 'uid', - 'node_id': 'nodeId', - 'artifact_name': 'artifactName', - }, - 'location_map': { - 'uid': 'path', - 'node_id': 'path', - 'artifact_name': 'path', - }, - 'collection_format_map': { - } - }, - 
headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) + + @validate_call def get_artifact_file( + self, + namespace: StrictStr, + id_discriminator: StrictStr, + id: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + artifact_discriminator: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> bytearray: + """Get an artifact. + + + :param namespace: (required) + :type namespace: str + :param id_discriminator: (required) + :type id_discriminator: str + :param id: (required) + :type id: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param artifact_discriminator: (required) + :type artifact_discriminator: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_artifact_file_serialize( + namespace=namespace, + id_discriminator=id_discriminator, + id=id, + node_id=node_id, + artifact_name=artifact_name, + artifact_discriminator=artifact_discriminator, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_artifact_file_with_http_info( + self, + namespace: StrictStr, + id_discriminator: StrictStr, + id: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + artifact_discriminator: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[bytearray]: + """Get an artifact. + + + :param namespace: (required) + :type namespace: str + :param id_discriminator: (required) + :type id_discriminator: str + :param id: (required) + :type id: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param artifact_discriminator: (required) + :type artifact_discriminator: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_artifact_file_serialize( + namespace=namespace, + id_discriminator=id_discriminator, + id=id, + node_id=node_id, + artifact_name=artifact_name, + artifact_discriminator=artifact_discriminator, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_artifact_file_without_preload_content( + self, + namespace: StrictStr, + id_discriminator: StrictStr, + id: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + artifact_discriminator: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + 
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get an artifact. + + + :param namespace: (required) + :type namespace: str + :param id_discriminator: (required) + :type id_discriminator: str + :param id: (required) + :type id: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param artifact_discriminator: (required) + :type artifact_discriminator: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_artifact_file_serialize( + namespace=namespace, + id_discriminator=id_discriminator, + id=id, + node_id=node_id, + artifact_name=artifact_name, + artifact_discriminator=artifact_discriminator, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_artifact_file_serialize( self, namespace, id_discriminator, id, node_id, artifact_name, - artifact_discriminator="outputs", - **kwargs - ): - """Get an artifact. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, artifact_discriminator="outputs", async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - id_discriminator (str): - id (str): - node_id (str): - artifact_name (str): - artifact_discriminator (str): defaults to "outputs", must be one of ["outputs"] - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - file_type - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['id_discriminator'] = \ - id_discriminator - kwargs['id'] = \ - id - kwargs['node_id'] = \ - node_id - kwargs['artifact_name'] = \ - artifact_name - kwargs['artifact_discriminator'] = \ - artifact_discriminator - return self.get_artifact_file_endpoint.call_with_http_info(**kwargs) + artifact_discriminator, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + 
_files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if id_discriminator is not None: + _path_params['idDiscriminator'] = id_discriminator + if id is not None: + _path_params['id'] = id + if node_id is not None: + _path_params['nodeId'] = node_id + if artifact_name is not None: + _path_params['artifactName'] = artifact_name + if artifact_discriminator is not None: + _path_params['artifactDiscriminator'] = artifact_discriminator + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/artifact-files/{namespace}/{idDiscriminator}/{id}/{nodeId}/{artifactDiscriminator}/{artifactName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_input_artifact( + self, + namespace: StrictStr, + name: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> bytearray: + """Get an input artifact. 
+ + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_input_artifact_serialize( + namespace=namespace, + name=name, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_input_artifact_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[bytearray]: + """Get an input artifact. + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_input_artifact_serialize( + namespace=namespace, + name=name, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_input_artifact_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get an input artifact. + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_input_artifact_serialize( + namespace=namespace, + name=name, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_input_artifact_serialize( self, namespace, name, node_id, artifact_name, - **kwargs - ): - """Get an input artifact. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_input_artifact(namespace, name, node_id, artifact_name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - node_id (str): - artifact_name (str): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - file_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['node_id'] = \ - node_id - kwargs['artifact_name'] = \ - artifact_name - return self.get_input_artifact_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + if node_id is not None: + _path_params['nodeId'] = node_id + if artifact_name is not None: + _path_params['artifactName'] = artifact_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + 
resource_path='/input-artifacts/{namespace}/{name}/{nodeId}/{artifactName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_input_artifact_by_uid( + self, + uid: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> bytearray: + """Get an input artifact by UID. + + + :param uid: (required) + :type uid: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_input_artifact_by_uid_serialize( + uid=uid, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_input_artifact_by_uid_with_http_info( + self, + uid: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[bytearray]: + """Get an input artifact by UID. + + + :param uid: (required) + :type uid: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_input_artifact_by_uid_serialize( + uid=uid, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_input_artifact_by_uid_without_preload_content( + self, + uid: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get an input artifact by UID. + + + :param uid: (required) + :type uid: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_input_artifact_by_uid_serialize( + uid=uid, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_input_artifact_by_uid_serialize( self, uid, node_id, artifact_name, - **kwargs - ): - """Get an input artifact by UID. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_input_artifact_by_uid(uid, node_id, artifact_name, async_req=True) - >>> result = thread.get() - - Args: - uid (str): - node_id (str): - artifact_name (str): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. 
- _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - file_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['uid'] = \ - uid - kwargs['node_id'] = \ - node_id - kwargs['artifact_name'] = \ - artifact_name - return self.get_input_artifact_by_uid_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if uid is not None: + _path_params['uid'] = uid + if node_id is not None: + _path_params['nodeId'] = node_id + if artifact_name is not None: + _path_params['artifactName'] = artifact_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/input-artifacts-by-uid/{uid}/{nodeId}/{artifactName}', + path_params=_path_params, + 
query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_output_artifact( + self, + namespace: StrictStr, + name: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> bytearray: + """Get an output artifact. + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_output_artifact_serialize( + namespace=namespace, + name=name, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_output_artifact_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[bytearray]: + """Get an output artifact. + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_output_artifact_serialize( + namespace=namespace, + name=name, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_output_artifact_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get an output artifact. + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_output_artifact_serialize( + namespace=namespace, + name=name, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_output_artifact_serialize( self, namespace, name, node_id, artifact_name, - **kwargs - ): - """Get an output artifact. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_output_artifact(namespace, name, node_id, artifact_name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - node_id (str): - artifact_name (str): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - file_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['node_id'] = \ - node_id - kwargs['artifact_name'] = \ - artifact_name - return self.get_output_artifact_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + if node_id is not None: + _path_params['nodeId'] = node_id + if artifact_name is not None: + _path_params['artifactName'] = artifact_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + 
resource_path='/artifacts/{namespace}/{name}/{nodeId}/{artifactName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_output_artifact_by_uid( + self, + uid: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> bytearray: + """Get an output artifact by UID. + + + :param uid: (required) + :type uid: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_output_artifact_by_uid_serialize( + uid=uid, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_output_artifact_by_uid_with_http_info( + self, + uid: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[bytearray]: + """Get an output artifact by UID. + + + :param uid: (required) + :type uid: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_output_artifact_by_uid_serialize( + uid=uid, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_output_artifact_by_uid_without_preload_content( + self, + uid: StrictStr, + node_id: StrictStr, + artifact_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get an output artifact by UID. + + + :param uid: (required) + :type uid: str + :param node_id: (required) + :type node_id: str + :param artifact_name: (required) + :type artifact_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_output_artifact_by_uid_serialize( + uid=uid, + node_id=node_id, + artifact_name=artifact_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "bytearray", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_output_artifact_by_uid_serialize( self, uid, node_id, artifact_name, - **kwargs - ): - """Get an output artifact by UID. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_output_artifact_by_uid(uid, node_id, artifact_name, async_req=True) - >>> result = thread.get() - - Args: - uid (str): - node_id (str): - artifact_name (str): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. 
- _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - file_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['uid'] = \ - uid - kwargs['node_id'] = \ - node_id - kwargs['artifact_name'] = \ - artifact_name - return self.get_output_artifact_by_uid_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if uid is not None: + _path_params['uid'] = uid + if node_id is not None: + _path_params['nodeId'] = node_id + if artifact_name is not None: + _path_params['artifactName'] = artifact_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/artifacts-by-uid/{uid}/{nodeId}/{artifactName}', + path_params=_path_params, + 
query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + diff --git a/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py b/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py index 5053a8f21aa3..7f603633a005 100644 --- a/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py +++ b/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py @@ -1,909 +1,1885 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated +from pydantic import Field, StrictBool, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest 
-from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest - - -class ClusterWorkflowTemplateServiceApi(object): +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType + + +class ClusterWorkflowTemplateServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.create_cluster_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cluster-workflow-templates', - 'operation_id': 'create_cluster_workflow_template', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'body', - ], - 'required': [ - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'body': - (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest,), - }, - 'attribute_map': { - }, - 'location_map': { - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.delete_cluster_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - 
], - 'endpoint_path': '/api/v1/cluster-workflow-templates/{name}', - 'operation_id': 'delete_cluster_workflow_template', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'name', - 'delete_options_grace_period_seconds', - 'delete_options_preconditions_uid', - 'delete_options_preconditions_resource_version', - 'delete_options_orphan_dependents', - 'delete_options_propagation_policy', - 'delete_options_dry_run', - ], - 'required': [ - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'name': - (str,), - 'delete_options_grace_period_seconds': - (str,), - 'delete_options_preconditions_uid': - (str,), - 'delete_options_preconditions_resource_version': - (str,), - 'delete_options_orphan_dependents': - (bool,), - 'delete_options_propagation_policy': - (str,), - 'delete_options_dry_run': - ([str],), - }, - 'attribute_map': { - 'name': 'name', - 'delete_options_grace_period_seconds': 'deleteOptions.gracePeriodSeconds', - 'delete_options_preconditions_uid': 'deleteOptions.preconditions.uid', - 'delete_options_preconditions_resource_version': 'deleteOptions.preconditions.resourceVersion', - 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', - 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', - 'delete_options_dry_run': 'deleteOptions.dryRun', - }, - 'location_map': { - 'name': 'path', - 'delete_options_grace_period_seconds': 'query', - 'delete_options_preconditions_uid': 'query', - 'delete_options_preconditions_resource_version': 'query', - 'delete_options_orphan_dependents': 'query', - 'delete_options_propagation_policy': 'query', - 'delete_options_dry_run': 'query', - }, - 'collection_format_map': { - 'delete_options_dry_run': 'multi', - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - 
self.get_cluster_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cluster-workflow-templates/{name}', - 'operation_id': 'get_cluster_workflow_template', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'name', - 'get_options_resource_version', - ], - 'required': [ - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'name': - (str,), - 'get_options_resource_version': - (str,), - }, - 'attribute_map': { - 'name': 'name', - 'get_options_resource_version': 'getOptions.resourceVersion', - }, - 'location_map': { - 'name': 'path', - 'get_options_resource_version': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.lint_cluster_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cluster-workflow-templates/lint', - 'operation_id': 'lint_cluster_workflow_template', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'body', - ], - 'required': [ - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'body': - (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest,), - }, - 'attribute_map': { - }, - 'location_map': { - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.list_cluster_workflow_templates_endpoint = _Endpoint( - settings={ - 'response_type': 
(IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cluster-workflow-templates', - 'operation_id': 'list_cluster_workflow_templates', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 
'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.update_cluster_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cluster-workflow-templates/{name}', - 'operation_id': 'update_cluster_workflow_template', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'name', - 'body', - ], - 'required': [ - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest,), - }, - 'attribute_map': { - 'name': 'name', - }, - 'location_map': { - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) + + @validate_call def create_cluster_workflow_template( + self, + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate: + """create_cluster_workflow_template + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest + :param _request_timeout: timeout 
setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_cluster_workflow_template_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_cluster_workflow_template_with_http_info( + self, + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: 
Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]: + """create_cluster_workflow_template + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_cluster_workflow_template_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_cluster_workflow_template_without_preload_content( + self, + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create_cluster_workflow_template + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_cluster_workflow_template_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_cluster_workflow_template_serialize( self, body, - **kwargs - ): - """create_cluster_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_cluster_workflow_template(body, async_req=True) - >>> result = thread.get() - - Args: - body (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['body'] = \ - body - return self.create_cluster_workflow_template_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # 
process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/cluster-workflow-templates', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def delete_cluster_workflow_template( + self, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """delete_cluster_workflow_template + + + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. 
zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_cluster_workflow_template_serialize( + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_cluster_workflow_template_with_http_info( + self, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. 
Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """delete_cluster_workflow_template + + + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_cluster_workflow_template_serialize( + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_cluster_workflow_template_without_preload_content( + self, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. 
If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete_cluster_workflow_template + + + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. 
+ :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_cluster_workflow_template_serialize( + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_cluster_workflow_template_serialize( self, name, - **kwargs - ): - """delete_cluster_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_cluster_workflow_template(name, async_req=True) - >>> result = thread.get() - - Args: - name (str): - - Keyword Args: - delete_options_grace_period_seconds (str): The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.. [optional] - delete_options_preconditions_uid (str): Specifies the target UID. +optional.. [optional] - delete_options_preconditions_resource_version (str): Specifies the target ResourceVersion +optional.. [optional] - delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] - delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] - delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.. 
[optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['name'] = \ - name - return self.delete_cluster_workflow_template_endpoint.call_with_http_info(**kwargs) + delete_options_grace_period_seconds, + delete_options_preconditions_uid, + delete_options_preconditions_resource_version, + delete_options_orphan_dependents, + delete_options_propagation_policy, + delete_options_dry_run, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'deleteOptions.dryRun': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if name is not None: + _path_params['name'] = name + # process the query parameters + if delete_options_grace_period_seconds is not None: + + _query_params.append(('deleteOptions.gracePeriodSeconds', delete_options_grace_period_seconds)) + + if delete_options_preconditions_uid is not None: + + _query_params.append(('deleteOptions.preconditions.uid', delete_options_preconditions_uid)) + + if delete_options_preconditions_resource_version is not None: + + _query_params.append(('deleteOptions.preconditions.resourceVersion', 
delete_options_preconditions_resource_version)) + + if delete_options_orphan_dependents is not None: + + _query_params.append(('deleteOptions.orphanDependents', delete_options_orphan_dependents)) + + if delete_options_propagation_policy is not None: + + _query_params.append(('deleteOptions.propagationPolicy', delete_options_propagation_policy)) + + if delete_options_dry_run is not None: + + _query_params.append(('deleteOptions.dryRun', delete_options_dry_run)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/api/v1/cluster-workflow-templates/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_cluster_workflow_template( + self, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate: + """get_cluster_workflow_template + + + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_cluster_workflow_template_serialize( + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_cluster_workflow_template_with_http_info( + self, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]: + """get_cluster_workflow_template + + + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_cluster_workflow_template_serialize( + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_cluster_workflow_template_without_preload_content( + self, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_cluster_workflow_template + + + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_cluster_workflow_template_serialize( + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_cluster_workflow_template_serialize( self, name, - **kwargs - ): - """get_cluster_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_cluster_workflow_template(name, async_req=True) - >>> result = thread.get() - - Args: - name (str): - - Keyword Args: - get_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['name'] = \ - name - return self.get_cluster_workflow_template_endpoint.call_with_http_info(**kwargs) + get_options_resource_version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if name is not None: + _path_params['name'] = name + # process the query 
parameters + if get_options_resource_version is not None: + + _query_params.append(('getOptions.resourceVersion', get_options_resource_version)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/cluster-workflow-templates/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def lint_cluster_workflow_template( + self, + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate: + """lint_cluster_workflow_template + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_cluster_workflow_template_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lint_cluster_workflow_template_with_http_info( + self, + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]: + """lint_cluster_workflow_template + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_cluster_workflow_template_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lint_cluster_workflow_template_without_preload_content( + self, + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + 
"""lint_cluster_workflow_template + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_cluster_workflow_template_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lint_cluster_workflow_template_serialize( self, body, - **kwargs - ): - """lint_cluster_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.lint_cluster_workflow_template(body, async_req=True) - >>> result = thread.get() - - Args: - body (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['body'] = \ - body - return self.lint_cluster_workflow_template_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + 
resource_path='/api/v1/cluster-workflow-templates/lint', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def list_cluster_workflow_templates( self, - **kwargs - ): - """list_cluster_workflow_templates # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_cluster_workflow_templates(async_req=True) - >>> result = thread.get() - - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. 
[optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. 
[optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - return self.list_cluster_workflow_templates_endpoint.call_with_http_info(**kwargs) + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. 
+optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList: + """list_cluster_workflow_templates + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_cluster_workflow_templates_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + 
response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_cluster_workflow_templates_with_http_info( + self, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList]: + """list_cluster_workflow_templates + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
+ :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_cluster_workflow_templates_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_cluster_workflow_templates_without_preload_content( + self, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_cluster_workflow_templates + + + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_cluster_workflow_templates_serialize( + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_cluster_workflow_templates_serialize( + self, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: 
Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/cluster-workflow-templates', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + 
auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def update_cluster_workflow_template( + self, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate: + """update_cluster_workflow_template + + + :param name: DEPRECATED: This field is ignored. (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_cluster_workflow_template_serialize( + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_cluster_workflow_template_with_http_info( + self, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]: + """update_cluster_workflow_template + + + :param name: DEPRECATED: This field is ignored. (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_cluster_workflow_template_serialize( + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_cluster_workflow_template_without_preload_content( + self, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update_cluster_workflow_template + + + :param name: DEPRECATED: This field is ignored. 
(required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_cluster_workflow_template_serialize( + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_cluster_workflow_template_serialize( self, name, body, - **kwargs - ): - """update_cluster_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_cluster_workflow_template(name, body, async_req=True) - >>> result = thread.get() - - Args: - name (str): DEPRECATED: This field is ignored. 
- body (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.update_cluster_workflow_template_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 
'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/cluster-workflow-templates/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + diff --git a/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py b/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py index e510d7300a69..06030c8ae2b2 100644 --- a/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py @@ -1,1271 +1,2573 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated +from pydantic import Field, StrictBool, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_cron_workflow_request import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_list import IoArgoprojWorkflowV1alpha1CronWorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_create_cron_workflow_request import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_list import IoArgoprojWorkflowV1alpha1CronWorkflowList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest - - -class CronWorkflowServiceApi(object): +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType + + +class CronWorkflowServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. 
""" - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.create_cron_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}', - 'operation_id': 'create_cron_workflow', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.delete_cron_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}', - 'operation_id': 'delete_cron_workflow', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'delete_options_grace_period_seconds', - 'delete_options_preconditions_uid', - 'delete_options_preconditions_resource_version', - 'delete_options_orphan_dependents', - 'delete_options_propagation_policy', - 'delete_options_dry_run', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - 
(str,), - 'name': - (str,), - 'delete_options_grace_period_seconds': - (str,), - 'delete_options_preconditions_uid': - (str,), - 'delete_options_preconditions_resource_version': - (str,), - 'delete_options_orphan_dependents': - (bool,), - 'delete_options_propagation_policy': - (str,), - 'delete_options_dry_run': - ([str],), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'delete_options_grace_period_seconds': 'deleteOptions.gracePeriodSeconds', - 'delete_options_preconditions_uid': 'deleteOptions.preconditions.uid', - 'delete_options_preconditions_resource_version': 'deleteOptions.preconditions.resourceVersion', - 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', - 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', - 'delete_options_dry_run': 'deleteOptions.dryRun', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'delete_options_grace_period_seconds': 'query', - 'delete_options_preconditions_uid': 'query', - 'delete_options_preconditions_resource_version': 'query', - 'delete_options_orphan_dependents': 'query', - 'delete_options_propagation_policy': 'query', - 'delete_options_dry_run': 'query', - }, - 'collection_format_map': { - 'delete_options_dry_run': 'multi', - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.get_cron_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}', - 'operation_id': 'get_cron_workflow', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'get_options_resource_version', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - 
(str,), - 'name': - (str,), - 'get_options_resource_version': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'get_options_resource_version': 'getOptions.resourceVersion', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'get_options_resource_version': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.lint_cron_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}/lint', - 'operation_id': 'lint_cron_workflow', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.list_cron_workflows_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflowList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}', - 'operation_id': 'list_cron_workflows', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 
'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.resume_cron_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [ - 
'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}/resume', - 'operation_id': 'resume_cron_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.suspend_cron_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}/suspend', - 'operation_id': 'suspend_cron_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.update_cron_workflow_endpoint = _Endpoint( - 
settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1CronWorkflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/cron-workflows/{namespace}/{name}', - 'operation_id': 'update_cron_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) + + @validate_call def create_cron_workflow( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1CronWorkflow: + """create_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_cron_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_cron_workflow_with_http_info( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1CronWorkflow]: + """create_cron_workflow + + + :param namespace: 
(required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_cron_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_cron_workflow_without_preload_content( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_cron_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_cron_workflow_serialize( self, namespace, body, - **kwargs - ): - """create_cron_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_cron_workflow(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. 
- _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1CronWorkflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.create_cron_workflow_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: 
Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/cron-workflows/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def delete_cron_workflow( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. 
+optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """delete_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_cron_workflow_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_cron_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """delete_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. 
Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_cron_workflow_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_cron_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. 
Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_cron_workflow_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_cron_workflow_serialize( self, namespace, name, - **kwargs - ): - """delete_cron_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_cron_workflow(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - delete_options_grace_period_seconds (str): The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.. [optional] - delete_options_preconditions_uid (str): Specifies the target UID. +optional.. [optional] - delete_options_preconditions_resource_version (str): Specifies the target ResourceVersion +optional.. [optional] - delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] - delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] - delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.delete_cron_workflow_endpoint.call_with_http_info(**kwargs) + delete_options_grace_period_seconds, + delete_options_preconditions_uid, + delete_options_preconditions_resource_version, + delete_options_orphan_dependents, + delete_options_propagation_policy, + delete_options_dry_run, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'deleteOptions.dryRun': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: 
Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if delete_options_grace_period_seconds is not None: + + _query_params.append(('deleteOptions.gracePeriodSeconds', delete_options_grace_period_seconds)) + + if delete_options_preconditions_uid is not None: + + _query_params.append(('deleteOptions.preconditions.uid', delete_options_preconditions_uid)) + + if delete_options_preconditions_resource_version is not None: + + _query_params.append(('deleteOptions.preconditions.resourceVersion', delete_options_preconditions_resource_version)) + + if delete_options_orphan_dependents is not None: + + _query_params.append(('deleteOptions.orphanDependents', delete_options_orphan_dependents)) + + if delete_options_propagation_policy is not None: + + _query_params.append(('deleteOptions.propagationPolicy', delete_options_propagation_policy)) + + if delete_options_dry_run is not None: + + _query_params.append(('deleteOptions.dryRun', delete_options_dry_run)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/api/v1/cron-workflows/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_cron_workflow( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], 
Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1CronWorkflow: + """get_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_cron_workflow_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_cron_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1CronWorkflow]: + """get_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_cron_workflow_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_cron_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_cron_workflow_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_cron_workflow_serialize( self, namespace, name, - **kwargs - ): - """get_cron_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_cron_workflow(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - get_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1CronWorkflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.get_cron_workflow_endpoint.call_with_http_info(**kwargs) + get_options_resource_version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = 
namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if get_options_resource_version is not None: + + _query_params.append(('getOptions.resourceVersion', get_options_resource_version)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/cron-workflows/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def lint_cron_workflow( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1CronWorkflow: + """lint_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_cron_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lint_cron_workflow_with_http_info( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1CronWorkflow]: + """lint_cron_workflow + + + :param namespace: (required) + 
:type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._lint_cron_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lint_cron_workflow_without_preload_content( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """lint_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_cron_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lint_cron_workflow_serialize( self, namespace, body, - **kwargs - ): - """lint_cron_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.lint_cron_workflow(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. 
- _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1CronWorkflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.lint_cron_workflow_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] 
= None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/cron-workflows/{namespace}/lint', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def list_cron_workflows( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. 
+optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1CronWorkflowList: + """list_cron_workflows + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_cron_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + 
response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_cron_workflows_with_http_info( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1CronWorkflowList]: + """list_cron_workflows + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. 
Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_cron_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_cron_workflows_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_cron_workflows + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_cron_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_cron_workflows_serialize( self, namespace, - **kwargs - ): - """list_cron_workflows # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_cron_workflows(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. 
[optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1CronWorkflowList - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.list_cron_workflows_endpoint.call_with_http_info(**kwargs) + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not 
None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/cron-workflows/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def resume_cron_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, 
Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1CronWorkflow: + """resume_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._resume_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def resume_cron_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1CronWorkflow]: + """resume_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resume_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def resume_cron_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """resume_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resume_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _resume_cron_workflow_serialize( self, namespace, name, body, - **kwargs - ): - """resume_cron_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.resume_cron_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1CronWorkflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.resume_cron_workflow_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = 
_default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/cron-workflows/{namespace}/{name}/resume', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def suspend_cron_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1CronWorkflow: + """suspend_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suspend_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def suspend_cron_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1CronWorkflow]: + """suspend_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suspend_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def suspend_cron_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> 
RESTResponseType: + """suspend_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suspend_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _suspend_cron_workflow_serialize( self, namespace, name, body, - **kwargs - ): - """suspend_cron_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.suspend_cron_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1CronWorkflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.suspend_cron_workflow_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = 
_default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/cron-workflows/{namespace}/{name}/suspend', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def update_cron_workflow( + self, + namespace: StrictStr, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1CronWorkflow: + """update_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: DEPRECATED: This field is ignored. (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_cron_workflow_with_http_info( + self, + namespace: StrictStr, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1CronWorkflow]: + """update_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: DEPRECATED: This field is ignored. 
(required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_cron_workflow_without_preload_content( + self, + namespace: StrictStr, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update_cron_workflow + + + :param namespace: (required) + :type namespace: str + :param name: DEPRECATED: This field is ignored. (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_cron_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1CronWorkflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_cron_workflow_serialize( self, namespace, name, body, - **kwargs - ): - """update_cron_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_cron_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): DEPRECATED: This field is ignored. - body (IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. 
- _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1CronWorkflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.update_cron_workflow_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + 
_header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/cron-workflows/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + diff --git a/sdks/python/client/argo_workflows/api/event_service_api.py b/sdks/python/client/argo_workflows/api/event_service_api.py index 3c893df3ccb8..adcc018c5509 100644 --- a/sdks/python/client/argo_workflows/api/event_service_api.py +++ b/sdks/python/client/argo_workflows/api/event_service_api.py @@ -1,370 +1,745 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated -import re # noqa: F401 -import sys # noqa: F401 +from pydantic import Field, StrictBool, StrictStr +from typing import Any, Dict, Optional +from typing_extensions import Annotated +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType -class EventServiceApi(object): +class EventServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. 
""" - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.list_workflow_event_bindings_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowEventBindingList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflow-event-bindings/{namespace}', - 'operation_id': 'list_workflow_event_bindings', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 
'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + + @validate_call + def list_workflow_event_bindings( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowEventBindingList: + """list_workflow_event_bindings + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_workflow_event_bindings_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.receive_event_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/events/{namespace}/{discriminator}', - 'operation_id': 'receive_event', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'discriminator', - 'body', - ], - 'required': [ - 'namespace', - 'discriminator', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'discriminator': - (str,), - 'body': - (bool, date, datetime, dict, float, int, list, str, none_type,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'discriminator': 
'discriminator', - }, - 'location_map': { - 'namespace': 'path', - 'discriminator': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowEventBindingList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data - def list_workflow_event_bindings( + + @validate_call + def list_workflow_event_bindings_with_http_info( self, - namespace, - **kwargs - ): - """list_workflow_event_bindings # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_workflow_event_bindings(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. 
If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. 
If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowEventBindingList - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowEventBindingList]: + """list_workflow_event_bindings + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_workflow_event_bindings_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowEventBindingList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + + @validate_call + def list_workflow_event_bindings_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects 
by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
+optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_workflow_event_bindings + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. 
+ :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_workflow_event_bindings_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowEventBindingList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + return response_data.response + + + def _list_workflow_event_bindings_serialize( + self, + namespace, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query 
parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflow-event-bindings/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - 
kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.list_workflow_event_bindings_endpoint.call_with_http_info(**kwargs) + + + + @validate_call def receive_event( self, - namespace, - discriminator, - body, - **kwargs - ): - """receive_event # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.receive_event(namespace, discriminator, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. - discriminator (str): Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` - body (bool, date, datetime, dict, float, int, list, str, none_type): The event itself can be any data. - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. 
- _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + namespace: Annotated[StrictStr, Field(description="The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces.")], + discriminator: Annotated[StrictStr, Field(description="Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. 
This is made available as `discriminator` in the event binding selector (`/spec/event/selector)`")], + body: Annotated[Dict[str, Any], Field(description="The event itself can be any data.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """receive_event + + + :param namespace: The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. (required) + :type namespace: str + :param discriminator: Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` (required) + :type discriminator: str + :param body: The event itself can be any data. (required) + :type body: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._receive_event_serialize( + namespace=namespace, + discriminator=discriminator, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def receive_event_with_http_info( + self, + namespace: Annotated[StrictStr, Field(description="The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces.")], + discriminator: Annotated[StrictStr, Field(description="Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. 
This is made available as `discriminator` in the event binding selector (`/spec/event/selector)`")], + body: Annotated[Dict[str, Any], Field(description="The event itself can be any data.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """receive_event + + + :param namespace: The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. (required) + :type namespace: str + :param discriminator: Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` (required) + :type discriminator: str + :param body: The event itself can be any data. (required) + :type body: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._receive_event_serialize( + namespace=namespace, + discriminator=discriminator, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + + @validate_call + def receive_event_without_preload_content( + self, + namespace: Annotated[StrictStr, Field(description="The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces.")], + discriminator: Annotated[StrictStr, Field(description="Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. 
to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)`")], + body: Annotated[Dict[str, Any], Field(description="The event itself can be any data.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """receive_event + + + :param namespace: The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. (required) + :type namespace: str + :param discriminator: Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` (required) + :type discriminator: str + :param body: The event itself can be any data. (required) + :type body: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._receive_event_serialize( + namespace=namespace, + discriminator=discriminator, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + return response_data.response + + + def _receive_event_serialize( + self, + namespace, + discriminator, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if discriminator is not None: + 
_path_params['discriminator'] = discriminator + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/events/{namespace}/{discriminator}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['discriminator'] = \ - discriminator - kwargs['body'] = \ - body - return self.receive_event_endpoint.call_with_http_info(**kwargs) + diff --git a/sdks/python/client/argo_workflows/api/event_source_service_api.py b/sdks/python/client/argo_workflows/api/event_source_service_api.py index 874d90c19198..409f23713179 100644 --- a/sdks/python/client/argo_workflows/api/event_source_service_api.py +++ b/sdks/python/client/argo_workflows/api/event_source_service_api.py @@ -1,1221 +1,2577 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine 
for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from argo_workflows.models.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest +from argo_workflows.models.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList +from argo_workflows.models.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent +from argo_workflows.models.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.eventsource_create_event_source_request import 
EventsourceCreateEventSourceRequest -from argo_workflows.model.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList -from argo_workflows.model.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent -from argo_workflows.model.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry - - -class EventSourceServiceApi(object): +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType + + +class EventSourceServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. 
""" - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.create_event_source_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSource,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/event-sources/{namespace}', - 'operation_id': 'create_event_source', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (EventsourceCreateEventSourceRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client + + + @validate_call + def create_event_source( + self, + namespace: StrictStr, + body: EventsourceCreateEventSourceRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojEventsV1alpha1EventSource: + """create_event_source + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: EventsourceCreateEventSourceRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_event_source_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.delete_event_source_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/event-sources/{namespace}/{name}', - 'operation_id': 'delete_event_source', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'delete_options_grace_period_seconds', - 'delete_options_preconditions_uid', - 'delete_options_preconditions_resource_version', - 'delete_options_orphan_dependents', - 'delete_options_propagation_policy', - 'delete_options_dry_run', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'delete_options_grace_period_seconds': - (str,), - 'delete_options_preconditions_uid': - 
(str,), - 'delete_options_preconditions_resource_version': - (str,), - 'delete_options_orphan_dependents': - (bool,), - 'delete_options_propagation_policy': - (str,), - 'delete_options_dry_run': - ([str],), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'delete_options_grace_period_seconds': 'deleteOptions.gracePeriodSeconds', - 'delete_options_preconditions_uid': 'deleteOptions.preconditions.uid', - 'delete_options_preconditions_resource_version': 'deleteOptions.preconditions.resourceVersion', - 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', - 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', - 'delete_options_dry_run': 'deleteOptions.dryRun', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'delete_options_grace_period_seconds': 'query', - 'delete_options_preconditions_uid': 'query', - 'delete_options_preconditions_resource_version': 'query', - 'delete_options_orphan_dependents': 'query', - 'delete_options_propagation_policy': 'query', - 'delete_options_dry_run': 'query', - }, - 'collection_format_map': { - 'delete_options_dry_run': 'multi', - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - self.event_sources_logs_endpoint = _Endpoint( - settings={ - 'response_type': (StreamResultOfEventsourceLogEntry,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/stream/event-sources/{namespace}/logs', - 'operation_id': 'event_sources_logs', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'event_source_type', - 'event_name', - 'grep', - 'pod_log_options_container', - 'pod_log_options_follow', - 'pod_log_options_previous', - 'pod_log_options_since_seconds', - 
'pod_log_options_since_time_seconds', - 'pod_log_options_since_time_nanos', - 'pod_log_options_timestamps', - 'pod_log_options_tail_lines', - 'pod_log_options_limit_bytes', - 'pod_log_options_insecure_skip_tls_verify_backend', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'event_source_type': - (str,), - 'event_name': - (str,), - 'grep': - (str,), - 'pod_log_options_container': - (str,), - 'pod_log_options_follow': - (bool,), - 'pod_log_options_previous': - (bool,), - 'pod_log_options_since_seconds': - (str,), - 'pod_log_options_since_time_seconds': - (str,), - 'pod_log_options_since_time_nanos': - (int,), - 'pod_log_options_timestamps': - (bool,), - 'pod_log_options_tail_lines': - (str,), - 'pod_log_options_limit_bytes': - (str,), - 'pod_log_options_insecure_skip_tls_verify_backend': - (bool,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'event_source_type': 'eventSourceType', - 'event_name': 'eventName', - 'grep': 'grep', - 'pod_log_options_container': 'podLogOptions.container', - 'pod_log_options_follow': 'podLogOptions.follow', - 'pod_log_options_previous': 'podLogOptions.previous', - 'pod_log_options_since_seconds': 'podLogOptions.sinceSeconds', - 'pod_log_options_since_time_seconds': 'podLogOptions.sinceTime.seconds', - 'pod_log_options_since_time_nanos': 'podLogOptions.sinceTime.nanos', - 'pod_log_options_timestamps': 'podLogOptions.timestamps', - 'pod_log_options_tail_lines': 'podLogOptions.tailLines', - 'pod_log_options_limit_bytes': 'podLogOptions.limitBytes', - 'pod_log_options_insecure_skip_tls_verify_backend': 'podLogOptions.insecureSkipTLSVerifyBackend', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'query', - 'event_source_type': 'query', - 'event_name': 'query', - 'grep': 'query', - 'pod_log_options_container': 'query', 
- 'pod_log_options_follow': 'query', - 'pod_log_options_previous': 'query', - 'pod_log_options_since_seconds': 'query', - 'pod_log_options_since_time_seconds': 'query', - 'pod_log_options_since_time_nanos': 'query', - 'pod_log_options_timestamps': 'query', - 'pod_log_options_tail_lines': 'query', - 'pod_log_options_limit_bytes': 'query', - 'pod_log_options_insecure_skip_tls_verify_backend': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_event_source_with_http_info( + self, + namespace: StrictStr, + body: EventsourceCreateEventSourceRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1EventSource]: + """create_event_source + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: EventsourceCreateEventSourceRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_event_source_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.get_event_source_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSource,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/event-sources/{namespace}/{name}', - 'operation_id': 'get_event_source', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - self.list_event_sources_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSourceList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/event-sources/{namespace}', - 'operation_id': 'list_event_sources', - 
'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - 
}, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - self.update_event_source_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSource,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/event-sources/{namespace}/{name}', - 'operation_id': 'update_event_source', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (EventsourceUpdateEventSourceRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client + + + @validate_call + def create_event_source_without_preload_content( + self, + namespace: StrictStr, + body: EventsourceCreateEventSourceRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create_event_source + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: 
EventsourceCreateEventSourceRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_event_source_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.watch_event_sources_endpoint = _Endpoint( - settings={ - 'response_type': (StreamResultOfEventsourceEventSourceWatchEvent,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/stream/event-sources/{namespace}', - 'operation_id': 'watch_event_sources', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 
'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - def create_event_source( + + def _create_event_source_serialize( self, namespace, body, - **kwargs - ): - """create_event_source # noqa: E501 
- - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_event_source(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (EventsourceCreateEventSourceRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1EventSource - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/event-sources/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def delete_event_source( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: 
Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """delete_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_event_source_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_event_source_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. 
+optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """delete_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_event_source_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.create_event_source_endpoint.call_with_http_info(**kwargs) - def delete_event_source( + + @validate_call + def delete_event_source_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. 
+optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_event_source_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_event_source_serialize( self, namespace, name, - **kwargs - ): - """delete_event_source # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_event_source(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - delete_options_grace_period_seconds (str): The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.. [optional] - delete_options_preconditions_uid (str): Specifies the target UID. +optional.. [optional] - delete_options_preconditions_resource_version (str): Specifies the target ResourceVersion +optional.. [optional] - delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] - delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] - delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + delete_options_grace_period_seconds, + delete_options_preconditions_uid, + delete_options_preconditions_resource_version, + delete_options_orphan_dependents, + delete_options_propagation_policy, + delete_options_dry_run, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'deleteOptions.dryRun': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if delete_options_grace_period_seconds is not None: + + _query_params.append(('deleteOptions.gracePeriodSeconds', delete_options_grace_period_seconds)) + + if delete_options_preconditions_uid is not None: + + _query_params.append(('deleteOptions.preconditions.uid', delete_options_preconditions_uid)) + + if delete_options_preconditions_resource_version is not None: + + _query_params.append(('deleteOptions.preconditions.resourceVersion', delete_options_preconditions_resource_version)) + + if 
delete_options_orphan_dependents is not None: + + _query_params.append(('deleteOptions.orphanDependents', delete_options_orphan_dependents)) + + if delete_options_propagation_policy is not None: + + _query_params.append(('deleteOptions.propagationPolicy', delete_options_propagation_policy)) + + if delete_options_dry_run is not None: + + _query_params.append(('deleteOptions.dryRun', delete_options_dry_run)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/api/v1/event-sources/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def event_sources_logs( + self, + namespace: StrictStr, + name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this event source.")] = None, + event_source_type: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this event source type (e.g. `webhook`).")] = None, + event_name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this event name (e.g. 
`example`).")] = None, + grep: Annotated[Optional[StrictStr], Field(description="optional - only return entries where `msg` matches this regular expression.")] = None, + pod_log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + pod_log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + pod_log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + pod_log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + pod_log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + pod_log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + pod_log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. 
+optional.")] = None, + pod_log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + pod_log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + pod_log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfEventsourceLogEntry: + """event_sources_logs + + + :param namespace: (required) + :type namespace: str + :param name: optional - only return entries for this event source. 
+ :type name: str + :param event_source_type: optional - only return entries for this event source type (e.g. `webhook`). + :type event_source_type: str + :param event_name: optional - only return entries for this event name (e.g. `example`). + :type event_name: str + :param grep: optional - only return entries where `msg` matches this regular expression. + :type grep: str + :param pod_log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type pod_log_options_container: str + :param pod_log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type pod_log_options_follow: bool + :param pod_log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type pod_log_options_previous: bool + :param pod_log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type pod_log_options_since_seconds: str + :param pod_log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type pod_log_options_since_time_seconds: str + :param pod_log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type pod_log_options_since_time_nanos: int + :param pod_log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. 
+ :type pod_log_options_timestamps: bool + :param pod_log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type pod_log_options_tail_lines: str + :param pod_log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type pod_log_options_limit_bytes: str + :param pod_log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type pod_log_options_insecure_skip_tls_verify_backend: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._event_sources_logs_serialize( + namespace=namespace, + name=name, + event_source_type=event_source_type, + event_name=event_name, + grep=grep, + pod_log_options_container=pod_log_options_container, + pod_log_options_follow=pod_log_options_follow, + pod_log_options_previous=pod_log_options_previous, + pod_log_options_since_seconds=pod_log_options_since_seconds, + pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, + pod_log_options_timestamps=pod_log_options_timestamps, + pod_log_options_tail_lines=pod_log_options_tail_lines, + pod_log_options_limit_bytes=pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEventsourceLogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def event_sources_logs_with_http_info( + self, + namespace: StrictStr, + name: Annotated[Optional[StrictStr], 
Field(description="optional - only return entries for this event source.")] = None, + event_source_type: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this event source type (e.g. `webhook`).")] = None, + event_name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this event name (e.g. `example`).")] = None, + grep: Annotated[Optional[StrictStr], Field(description="optional - only return entries where `msg` matches this regular expression.")] = None, + pod_log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + pod_log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + pod_log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + pod_log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + pod_log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + pod_log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. 
This field may be limited in precision depending on context.")] = None, + pod_log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + pod_log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + pod_log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + pod_log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). 
+optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfEventsourceLogEntry]: + """event_sources_logs + + + :param namespace: (required) + :type namespace: str + :param name: optional - only return entries for this event source. + :type name: str + :param event_source_type: optional - only return entries for this event source type (e.g. `webhook`). + :type event_source_type: str + :param event_name: optional - only return entries for this event name (e.g. `example`). + :type event_name: str + :param grep: optional - only return entries where `msg` matches this regular expression. + :type grep: str + :param pod_log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type pod_log_options_container: str + :param pod_log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type pod_log_options_follow: bool + :param pod_log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type pod_log_options_previous: bool + :param pod_log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type pod_log_options_since_seconds: str + :param pod_log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type pod_log_options_since_time_seconds: str + :param pod_log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type pod_log_options_since_time_nanos: int + :param pod_log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type pod_log_options_timestamps: bool + :param pod_log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type pod_log_options_tail_lines: str + :param pod_log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type pod_log_options_limit_bytes: str + :param pod_log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. 
+ :type pod_log_options_insecure_skip_tls_verify_backend: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._event_sources_logs_serialize( + namespace=namespace, + name=name, + event_source_type=event_source_type, + event_name=event_name, + grep=grep, + pod_log_options_container=pod_log_options_container, + pod_log_options_follow=pod_log_options_follow, + pod_log_options_previous=pod_log_options_previous, + pod_log_options_since_seconds=pod_log_options_since_seconds, + pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, + pod_log_options_timestamps=pod_log_options_timestamps, + pod_log_options_tail_lines=pod_log_options_tail_lines, + pod_log_options_limit_bytes=pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEventsourceLogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.delete_event_source_endpoint.call_with_http_info(**kwargs) - def event_sources_logs( + + @validate_call + def event_sources_logs_without_preload_content( + self, + namespace: StrictStr, + name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this event source.")] = None, + event_source_type: Annotated[Optional[StrictStr], 
Field(description="optional - only return entries for this event source type (e.g. `webhook`).")] = None, + event_name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this event name (e.g. `example`).")] = None, + grep: Annotated[Optional[StrictStr], Field(description="optional - only return entries where `msg` matches this regular expression.")] = None, + pod_log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + pod_log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + pod_log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + pod_log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + pod_log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + pod_log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. 
This field may be limited in precision depending on context.")] = None, + pod_log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + pod_log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + pod_log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + pod_log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). 
+optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """event_sources_logs + + + :param namespace: (required) + :type namespace: str + :param name: optional - only return entries for this event source. + :type name: str + :param event_source_type: optional - only return entries for this event source type (e.g. `webhook`). + :type event_source_type: str + :param event_name: optional - only return entries for this event name (e.g. `example`). + :type event_name: str + :param grep: optional - only return entries where `msg` matches this regular expression. + :type grep: str + :param pod_log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type pod_log_options_container: str + :param pod_log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type pod_log_options_follow: bool + :param pod_log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type pod_log_options_previous: bool + :param pod_log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type pod_log_options_since_seconds: str + :param pod_log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type pod_log_options_since_time_seconds: str + :param pod_log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type pod_log_options_since_time_nanos: int + :param pod_log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type pod_log_options_timestamps: bool + :param pod_log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type pod_log_options_tail_lines: str + :param pod_log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type pod_log_options_limit_bytes: str + :param pod_log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. 
+ :type pod_log_options_insecure_skip_tls_verify_backend: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._event_sources_logs_serialize( + namespace=namespace, + name=name, + event_source_type=event_source_type, + event_name=event_name, + grep=grep, + pod_log_options_container=pod_log_options_container, + pod_log_options_follow=pod_log_options_follow, + pod_log_options_previous=pod_log_options_previous, + pod_log_options_since_seconds=pod_log_options_since_seconds, + pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, + pod_log_options_timestamps=pod_log_options_timestamps, + pod_log_options_tail_lines=pod_log_options_tail_lines, + pod_log_options_limit_bytes=pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEventsourceLogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _event_sources_logs_serialize( self, namespace, - **kwargs - ): - """event_sources_logs # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.event_sources_logs(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - name (str): optional - only return entries for this event source.. [optional] - event_source_type (str): optional - only return entries for this event source type (e.g. `webhook`).. [optional] - event_name (str): optional - only return entries for this event name (e.g. `example`).. [optional] - grep (str): optional - only return entries where `msg` matches this regular expression.. 
[optional] - pod_log_options_container (str): The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.. [optional] - pod_log_options_follow (bool): Follow the log stream of the pod. Defaults to false. +optional.. [optional] - pod_log_options_previous (bool): Return previous terminated container logs. Defaults to false. +optional.. [optional] - pod_log_options_since_seconds (str): A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.. [optional] - pod_log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] - pod_log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] - pod_log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - pod_log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] - pod_log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. 
[optional] - pod_log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. 
- async_req (bool): execute request asynchronously - - Returns: - StreamResultOfEventsourceLogEntry - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + name, + event_source_type, + event_name, + grep, + pod_log_options_container, + pod_log_options_follow, + pod_log_options_previous, + pod_log_options_since_seconds, + pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos, + pod_log_options_timestamps, + pod_log_options_tail_lines, + pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if name is not None: + + _query_params.append(('name', name)) + + if event_source_type is not None: + + _query_params.append(('eventSourceType', event_source_type)) + + if event_name is not None: + + _query_params.append(('eventName', event_name)) + + if grep is not None: + + _query_params.append(('grep', grep)) + + if pod_log_options_container is not None: + + _query_params.append(('podLogOptions.container', pod_log_options_container)) + + if pod_log_options_follow is not None: + + _query_params.append(('podLogOptions.follow', pod_log_options_follow)) + + if pod_log_options_previous is not None: + + _query_params.append(('podLogOptions.previous', pod_log_options_previous)) + + if pod_log_options_since_seconds is not None: + + _query_params.append(('podLogOptions.sinceSeconds', pod_log_options_since_seconds)) + + if 
pod_log_options_since_time_seconds is not None: + + _query_params.append(('podLogOptions.sinceTime.seconds', pod_log_options_since_time_seconds)) + + if pod_log_options_since_time_nanos is not None: + + _query_params.append(('podLogOptions.sinceTime.nanos', pod_log_options_since_time_nanos)) + + if pod_log_options_timestamps is not None: + + _query_params.append(('podLogOptions.timestamps', pod_log_options_timestamps)) + + if pod_log_options_tail_lines is not None: + + _query_params.append(('podLogOptions.tailLines', pod_log_options_tail_lines)) + + if pod_log_options_limit_bytes is not None: + + _query_params.append(('podLogOptions.limitBytes', pod_log_options_limit_bytes)) + + if pod_log_options_insecure_skip_tls_verify_backend is not None: + + _query_params.append(('podLogOptions.insecureSkipTLSVerifyBackend', pod_log_options_insecure_skip_tls_verify_backend)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/stream/event-sources/{namespace}/logs', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def get_event_source( + self, + namespace: StrictStr, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, 
Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojEventsV1alpha1EventSource: + """get_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_event_source_serialize( + namespace=namespace, + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_event_source_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1EventSource]: + """get_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_event_source_serialize( + namespace=namespace, + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.event_sources_logs_endpoint.call_with_http_info(**kwargs) - def get_event_source( + + @validate_call + def get_event_source_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_event_source + + + :param namespace: (required) + :type 
namespace: str + :param name: (required) + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_event_source_serialize( + namespace=namespace, + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_event_source_serialize( self, namespace, name, - **kwargs - ): - """get_event_source # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_event_source(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1EventSource - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/event-sources/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def list_event_sources( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. 
+optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojEventsV1alpha1EventSourceList: + """list_event_sources + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_event_sources_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSourceList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - 
kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_event_sources_with_http_info( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. 
It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1EventSourceList]: + """list_event_sources + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_event_sources_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSourceList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.get_event_source_endpoint.call_with_http_info(**kwargs) - def list_event_sources( + + @validate_call + def 
list_event_sources_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_event_sources + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
+ :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_event_sources_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSourceList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_event_sources_serialize( self, namespace, - **kwargs - ): - """list_event_sources # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_event_sources(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1EventSourceList - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + 
_query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/event-sources/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def update_event_source( + self, + namespace: StrictStr, + name: StrictStr, + body: EventsourceUpdateEventSourceRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojEventsV1alpha1EventSource: + """update_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: EventsourceUpdateEventSourceRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_event_source_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_event_source_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: EventsourceUpdateEventSourceRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: 
Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1EventSource]: + """update_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: EventsourceUpdateEventSourceRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_event_source_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.list_event_sources_endpoint.call_with_http_info(**kwargs) - def update_event_source( + + @validate_call + def update_event_source_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: EventsourceUpdateEventSourceRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update_event_source + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: EventsourceUpdateEventSourceRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_event_source_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1EventSource", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_event_source_serialize( self, namespace, name, body, - **kwargs - ): - """update_event_source # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_event_source(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (EventsourceUpdateEventSourceRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. 
- _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1EventSource - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/event-sources/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + 
_host=_host, + _request_auth=_request_auth ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + + + + @validate_call + def watch_event_sources( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfEventsourceEventSourceWatchEvent: + """watch_event_sources + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. 
+optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._watch_event_sources_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEventsourceEventSourceWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.update_event_source_endpoint.call_with_http_info(**kwargs) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data - def watch_event_sources( + + @validate_call + def watch_event_sources_with_http_info( self, - namespace, - **kwargs - ): - """watch_event_sources # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.watch_event_sources(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. 
Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfEventsourceEventSourceWatchEvent - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfEventsourceEventSourceWatchEvent]: + """watch_event_sources + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. 
Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._watch_event_sources_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEventsourceEventSourceWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + + @validate_call + def watch_event_sources_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. 
Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """watch_event_sources + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._watch_event_sources_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEventsourceEventSourceWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - 
kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + return response_data.response + + + def _watch_event_sources_serialize( + self, + namespace, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + 
_query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/stream/event-sources/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.watch_event_sources_endpoint.call_with_http_info(**kwargs) + diff --git a/sdks/python/client/argo_workflows/api/info_service_api.py b/sdks/python/client/argo_workflows/api/info_service_api.py index bced53622b91..25ff0354183b 100644 --- a/sdks/python/client/argo_workflows/api/info_service_api.py +++ b/sdks/python/client/argo_workflows/api/info_service_api.py @@ -1,519 +1,1027 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated -import re # noqa: F401 -import sys # noqa: F401 +from argo_workflows.models.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse -from argo_workflows.model.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse -from argo_workflows.model.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from 
argo_workflows.rest import RESTResponseType -class InfoServiceApi(object): +class InfoServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.collect_event_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/tracking/event', - 'operation_id': 'collect_event', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'body', - ], - 'required': [ - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'body': - (IoArgoprojWorkflowV1alpha1CollectEventRequest,), - }, - 'attribute_map': { - }, - 'location_map': { - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.get_info_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1InfoResponse,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/info', - 'operation_id': 'get_info', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - }, - 'attribute_map': { - }, - 'location_map': { - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.get_user_info_endpoint = _Endpoint( - settings={ - 
'response_type': (IoArgoprojWorkflowV1alpha1GetUserInfoResponse,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/userinfo', - 'operation_id': 'get_user_info', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - }, - 'attribute_map': { - }, - 'location_map': { - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.get_version_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Version,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/version', - 'operation_id': 'get_version', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - }, - 'attribute_map': { - }, - 'location_map': { - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) + + @validate_call def collect_event( + self, + body: IoArgoprojWorkflowV1alpha1CollectEventRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """collect_event + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CollectEventRequest + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._collect_event_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def collect_event_with_http_info( + self, + body: IoArgoprojWorkflowV1alpha1CollectEventRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """collect_event + + + :param body: (required) + :type body: 
IoArgoprojWorkflowV1alpha1CollectEventRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._collect_event_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def collect_event_without_preload_content( + self, + body: IoArgoprojWorkflowV1alpha1CollectEventRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, 
Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """collect_event + + + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1CollectEventRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._collect_event_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _collect_event_serialize( self, body, - **kwargs - ): - """collect_event # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.collect_event(body, async_req=True) - >>> result = thread.get() - - Args: - body (IoArgoprojWorkflowV1alpha1CollectEventRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['body'] = \ - body - return self.collect_event_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + return self.api_client.param_serialize( + method='POST', + 
resource_path='/api/v1/tracking/event', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_info( self, - **kwargs - ): - """get_info # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_info(async_req=True) - >>> result = thread.get() - - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. 
- async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1InfoResponse - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - return self.get_info_endpoint.call_with_http_info(**kwargs) + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1InfoResponse: + """get_info + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1InfoResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_info_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1InfoResponse]: + """get_info + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1InfoResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_info_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_info + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1InfoResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_info_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/info', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + 
_host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def get_user_info( self, - **kwargs - ): - """get_user_info # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_user_info(async_req=True) - >>> result = thread.get() - - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1GetUserInfoResponse - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - return self.get_user_info_endpoint.call_with_http_info(**kwargs) + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1GetUserInfoResponse: + """get_user_info + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1GetUserInfoResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_user_info_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1GetUserInfoResponse]: + """get_user_info + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1GetUserInfoResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_user_info_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_user_info + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1GetUserInfoResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_user_info_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/userinfo', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + 
collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def get_version( self, - **kwargs - ): - """get_version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_version(async_req=True) - >>> result = thread.get() - - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Version - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - return self.get_version_endpoint.call_with_http_info(**kwargs) + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Version: + """get_version + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_version_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Version", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_version_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Version]: + """get_version + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_version_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Version", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_version_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_version + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_version_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Version", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_version_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/version', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + diff --git a/sdks/python/client/argo_workflows/api/sensor_service_api.py 
b/sdks/python/client/argo_workflows/api/sensor_service_api.py index c8e7d89c762b..227f0a7dd4b8 100644 --- a/sdks/python/client/argo_workflows/api/sensor_service_api.py +++ b/sdks/python/client/argo_workflows/api/sensor_service_api.py @@ -1,1221 +1,2577 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList +from argo_workflows.models.sensor_create_sensor_request import SensorCreateSensorRequest +from argo_workflows.models.sensor_update_sensor_request import SensorUpdateSensorRequest +from argo_workflows.models.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry +from argo_workflows.models.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from 
argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList -from argo_workflows.model.sensor_create_sensor_request import SensorCreateSensorRequest -from argo_workflows.model.sensor_update_sensor_request import SensorUpdateSensorRequest -from argo_workflows.model.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry -from argo_workflows.model.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent - - -class SensorServiceApi(object): +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType + + +class SensorServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. 
""" - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.create_sensor_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1Sensor,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/sensors/{namespace}', - 'operation_id': 'create_sensor', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (SensorCreateSensorRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client + + + @validate_call + def create_sensor( + self, + namespace: StrictStr, + body: SensorCreateSensorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojEventsV1alpha1Sensor: + """create_sensor + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: SensorCreateSensorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_sensor_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.delete_sensor_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/sensors/{namespace}/{name}', - 'operation_id': 'delete_sensor', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'delete_options_grace_period_seconds', - 'delete_options_preconditions_uid', - 'delete_options_preconditions_resource_version', - 'delete_options_orphan_dependents', - 'delete_options_propagation_policy', - 'delete_options_dry_run', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'delete_options_grace_period_seconds': - (str,), - 'delete_options_preconditions_uid': - (str,), - 'delete_options_preconditions_resource_version': - (str,), - 
'delete_options_orphan_dependents': - (bool,), - 'delete_options_propagation_policy': - (str,), - 'delete_options_dry_run': - ([str],), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'delete_options_grace_period_seconds': 'deleteOptions.gracePeriodSeconds', - 'delete_options_preconditions_uid': 'deleteOptions.preconditions.uid', - 'delete_options_preconditions_resource_version': 'deleteOptions.preconditions.resourceVersion', - 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', - 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', - 'delete_options_dry_run': 'deleteOptions.dryRun', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'delete_options_grace_period_seconds': 'query', - 'delete_options_preconditions_uid': 'query', - 'delete_options_preconditions_resource_version': 'query', - 'delete_options_orphan_dependents': 'query', - 'delete_options_propagation_policy': 'query', - 'delete_options_dry_run': 'query', - }, - 'collection_format_map': { - 'delete_options_dry_run': 'multi', - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - self.get_sensor_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1Sensor,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/sensors/{namespace}/{name}', - 'operation_id': 'get_sensor', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'get_options_resource_version', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 
'get_options_resource_version': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'get_options_resource_version': 'getOptions.resourceVersion', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'get_options_resource_version': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_sensor_with_http_info( + self, + namespace: StrictStr, + body: SensorCreateSensorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1Sensor]: + """create_sensor + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: SensorCreateSensorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_sensor_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.list_sensors_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1SensorList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/sensors/{namespace}', - 'operation_id': 'list_sensors', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 
'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - self.sensors_logs_endpoint = _Endpoint( - settings={ - 'response_type': (StreamResultOfSensorLogEntry,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/stream/sensors/{namespace}/logs', - 'operation_id': 'sensors_logs', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'trigger_name', - 'grep', - 'pod_log_options_container', - 'pod_log_options_follow', - 'pod_log_options_previous', - 'pod_log_options_since_seconds', - 'pod_log_options_since_time_seconds', - 'pod_log_options_since_time_nanos', - 'pod_log_options_timestamps', - 'pod_log_options_tail_lines', - 'pod_log_options_limit_bytes', - 'pod_log_options_insecure_skip_tls_verify_backend', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 
'name': - (str,), - 'trigger_name': - (str,), - 'grep': - (str,), - 'pod_log_options_container': - (str,), - 'pod_log_options_follow': - (bool,), - 'pod_log_options_previous': - (bool,), - 'pod_log_options_since_seconds': - (str,), - 'pod_log_options_since_time_seconds': - (str,), - 'pod_log_options_since_time_nanos': - (int,), - 'pod_log_options_timestamps': - (bool,), - 'pod_log_options_tail_lines': - (str,), - 'pod_log_options_limit_bytes': - (str,), - 'pod_log_options_insecure_skip_tls_verify_backend': - (bool,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'trigger_name': 'triggerName', - 'grep': 'grep', - 'pod_log_options_container': 'podLogOptions.container', - 'pod_log_options_follow': 'podLogOptions.follow', - 'pod_log_options_previous': 'podLogOptions.previous', - 'pod_log_options_since_seconds': 'podLogOptions.sinceSeconds', - 'pod_log_options_since_time_seconds': 'podLogOptions.sinceTime.seconds', - 'pod_log_options_since_time_nanos': 'podLogOptions.sinceTime.nanos', - 'pod_log_options_timestamps': 'podLogOptions.timestamps', - 'pod_log_options_tail_lines': 'podLogOptions.tailLines', - 'pod_log_options_limit_bytes': 'podLogOptions.limitBytes', - 'pod_log_options_insecure_skip_tls_verify_backend': 'podLogOptions.insecureSkipTLSVerifyBackend', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'query', - 'trigger_name': 'query', - 'grep': 'query', - 'pod_log_options_container': 'query', - 'pod_log_options_follow': 'query', - 'pod_log_options_previous': 'query', - 'pod_log_options_since_seconds': 'query', - 'pod_log_options_since_time_seconds': 'query', - 'pod_log_options_since_time_nanos': 'query', - 'pod_log_options_timestamps': 'query', - 'pod_log_options_tail_lines': 'query', - 'pod_log_options_limit_bytes': 'query', - 'pod_log_options_insecure_skip_tls_verify_backend': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - 
api_client=api_client + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - self.update_sensor_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojEventsV1alpha1Sensor,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/sensors/{namespace}/{name}', - 'operation_id': 'update_sensor', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (SensorUpdateSensorRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client + + + @validate_call + def create_sensor_without_preload_content( + self, + namespace: StrictStr, + body: SensorCreateSensorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create_sensor + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: SensorCreateSensorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_sensor_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - self.watch_sensors_endpoint = _Endpoint( - settings={ - 'response_type': (StreamResultOfSensorSensorWatchEvent,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/stream/sensors/{namespace}', - 'operation_id': 'watch_sensors', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 
'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - def create_sensor( + + def _create_sensor_serialize( self, namespace, body, - **kwargs - ): - """create_sensor # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_sensor(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (SensorCreateSensorRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1Sensor - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/sensors/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def delete_sensor( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], 
Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """delete_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_sensor_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_sensor_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. 
+optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """delete_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_sensor_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.create_sensor_endpoint.call_with_http_info(**kwargs) - def delete_sensor( + + @validate_call + def delete_sensor_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. 
+optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_sensor_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_sensor_serialize( self, namespace, name, - **kwargs - ): - """delete_sensor # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_sensor(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - delete_options_grace_period_seconds (str): The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.. [optional] - delete_options_preconditions_uid (str): Specifies the target UID. +optional.. [optional] - delete_options_preconditions_resource_version (str): Specifies the target ResourceVersion +optional.. [optional] - delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. 
If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] - delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] - delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + delete_options_grace_period_seconds, + delete_options_preconditions_uid, + delete_options_preconditions_resource_version, + delete_options_orphan_dependents, + delete_options_propagation_policy, + delete_options_dry_run, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'deleteOptions.dryRun': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if delete_options_grace_period_seconds is not None: + + _query_params.append(('deleteOptions.gracePeriodSeconds', delete_options_grace_period_seconds)) + + if delete_options_preconditions_uid is not None: + + _query_params.append(('deleteOptions.preconditions.uid', delete_options_preconditions_uid)) + + if delete_options_preconditions_resource_version is not None: + + _query_params.append(('deleteOptions.preconditions.resourceVersion', delete_options_preconditions_resource_version)) + + if delete_options_orphan_dependents is not None: + + 
_query_params.append(('deleteOptions.orphanDependents', delete_options_orphan_dependents)) + + if delete_options_propagation_policy is not None: + + _query_params.append(('deleteOptions.propagationPolicy', delete_options_propagation_policy)) + + if delete_options_dry_run is not None: + + _query_params.append(('deleteOptions.dryRun', delete_options_dry_run)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/api/v1/sensors/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def get_sensor( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojEventsV1alpha1Sensor: + """get_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_sensor_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_sensor_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1Sensor]: + """get_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_sensor_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.delete_sensor_endpoint.call_with_http_info(**kwargs) - def get_sensor( + + @validate_call + def get_sensor_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_sensor_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_sensor_serialize( self, namespace, name, - **kwargs - ): - """get_sensor # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_sensor(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - get_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1Sensor - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + get_options_resource_version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if get_options_resource_version is not None: + + _query_params.append(('getOptions.resourceVersion', get_options_resource_version)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + 
resource_path='/api/v1/sensors/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def list_sensors( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojEventsV1alpha1SensorList: + """list_sensors + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_sensors_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1SensorList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_sensors_with_http_info( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. 
+optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
+optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1SensorList]: + """list_sensors + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. 
+ :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_sensors_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1SensorList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.get_sensor_endpoint.call_with_http_info(**kwargs) - def list_sensors( + + @validate_call + def list_sensors_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. 
+optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_sensors + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_sensors_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1SensorList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_sensors_serialize( self, namespace, - **kwargs - ): 
- """list_sensors # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_sensors(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. 
This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. 
- Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1SensorList - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + 
_query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/sensors/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def sensors_logs( + self, + namespace: StrictStr, + name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this sensor name.")] = None, + trigger_name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this trigger.")] = None, + grep: Annotated[Optional[StrictStr], Field(description="option - only return entries where `msg` contains this regular expressions.")] = None, + pod_log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. 
+optional.")] = None, + pod_log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + pod_log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + pod_log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + pod_log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + pod_log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + pod_log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + pod_log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + pod_log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + pod_log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfSensorLogEntry: + """sensors_logs + + + :param namespace: (required) + :type namespace: str + :param name: optional - only return entries for this sensor name. + :type name: str + :param trigger_name: optional - only return entries for this trigger. + :type trigger_name: str + :param grep: option - only return entries where `msg` contains this regular expressions. + :type grep: str + :param pod_log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type pod_log_options_container: str + :param pod_log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. 
+ :type pod_log_options_follow: bool + :param pod_log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type pod_log_options_previous: bool + :param pod_log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type pod_log_options_since_seconds: str + :param pod_log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type pod_log_options_since_time_seconds: str + :param pod_log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type pod_log_options_since_time_nanos: int + :param pod_log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type pod_log_options_timestamps: bool + :param pod_log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type pod_log_options_tail_lines: str + :param pod_log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
+ :type pod_log_options_limit_bytes: str + :param pod_log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type pod_log_options_insecure_skip_tls_verify_backend: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._sensors_logs_serialize( + namespace=namespace, + name=name, + trigger_name=trigger_name, + grep=grep, + pod_log_options_container=pod_log_options_container, + pod_log_options_follow=pod_log_options_follow, + pod_log_options_previous=pod_log_options_previous, + pod_log_options_since_seconds=pod_log_options_since_seconds, + pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, + pod_log_options_timestamps=pod_log_options_timestamps, + pod_log_options_tail_lines=pod_log_options_tail_lines, + pod_log_options_limit_bytes=pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfSensorLogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def sensors_logs_with_http_info( + self, + namespace: StrictStr, + name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this sensor name.")] = None, + trigger_name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this trigger.")] = None, + grep: Annotated[Optional[StrictStr], Field(description="option - only return entries where `msg` contains this regular expressions.")] = None, + pod_log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. 
Defaults to only container if there is one container in the pod. +optional.")] = None, + pod_log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + pod_log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + pod_log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + pod_log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + pod_log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + pod_log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + pod_log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. 
If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + pod_log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + pod_log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfSensorLogEntry]: + """sensors_logs + + + :param namespace: (required) + :type namespace: str + :param name: optional - only return entries for this sensor name. + :type name: str + :param trigger_name: optional - only return entries for this trigger. + :type trigger_name: str + :param grep: option - only return entries where `msg` contains this regular expressions. 
+ :type grep: str + :param pod_log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type pod_log_options_container: str + :param pod_log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type pod_log_options_follow: bool + :param pod_log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type pod_log_options_previous: bool + :param pod_log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type pod_log_options_since_seconds: str + :param pod_log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type pod_log_options_since_time_seconds: str + :param pod_log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type pod_log_options_since_time_nanos: int + :param pod_log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type pod_log_options_timestamps: bool + :param pod_log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. 
+ :type pod_log_options_tail_lines: str + :param pod_log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type pod_log_options_limit_bytes: str + :param pod_log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type pod_log_options_insecure_skip_tls_verify_backend: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._sensors_logs_serialize( + namespace=namespace, + name=name, + trigger_name=trigger_name, + grep=grep, + pod_log_options_container=pod_log_options_container, + pod_log_options_follow=pod_log_options_follow, + pod_log_options_previous=pod_log_options_previous, + pod_log_options_since_seconds=pod_log_options_since_seconds, + pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, + pod_log_options_timestamps=pod_log_options_timestamps, + pod_log_options_tail_lines=pod_log_options_tail_lines, + pod_log_options_limit_bytes=pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfSensorLogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.list_sensors_endpoint.call_with_http_info(**kwargs) - def sensors_logs( + + @validate_call + def sensors_logs_without_preload_content( + self, + namespace: StrictStr, + name: Annotated[Optional[StrictStr], Field(description="optional - only return entries for this sensor name.")] = None, + trigger_name: Annotated[Optional[StrictStr], Field(description="optional - only return 
entries for this trigger.")] = None, + grep: Annotated[Optional[StrictStr], Field(description="option - only return entries where `msg` contains this regular expressions.")] = None, + pod_log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + pod_log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + pod_log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + pod_log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + pod_log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + pod_log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + pod_log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. 
+optional.")] = None, + pod_log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + pod_log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + pod_log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """sensors_logs + + + :param namespace: (required) + :type namespace: str + :param name: optional - only return entries for this sensor name. + :type name: str + :param trigger_name: optional - only return entries for this trigger. 
+ :type trigger_name: str + :param grep: option - only return entries where `msg` contains this regular expressions. + :type grep: str + :param pod_log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type pod_log_options_container: str + :param pod_log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type pod_log_options_follow: bool + :param pod_log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type pod_log_options_previous: bool + :param pod_log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type pod_log_options_since_seconds: str + :param pod_log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type pod_log_options_since_time_seconds: str + :param pod_log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type pod_log_options_since_time_nanos: int + :param pod_log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type pod_log_options_timestamps: bool + :param pod_log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. 
+ :type pod_log_options_tail_lines: str + :param pod_log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type pod_log_options_limit_bytes: str + :param pod_log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type pod_log_options_insecure_skip_tls_verify_backend: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._sensors_logs_serialize( + namespace=namespace, + name=name, + trigger_name=trigger_name, + grep=grep, + pod_log_options_container=pod_log_options_container, + pod_log_options_follow=pod_log_options_follow, + pod_log_options_previous=pod_log_options_previous, + pod_log_options_since_seconds=pod_log_options_since_seconds, + pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, + pod_log_options_timestamps=pod_log_options_timestamps, + pod_log_options_tail_lines=pod_log_options_tail_lines, + pod_log_options_limit_bytes=pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfSensorLogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _sensors_logs_serialize( self, namespace, - **kwargs - ): - """sensors_logs # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.sensors_logs(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - name (str): optional - only return entries for this sensor name.. [optional] - trigger_name (str): optional - only return entries for this trigger.. [optional] - grep (str): option - only return entries where `msg` contains this regular expressions.. [optional] - pod_log_options_container (str): The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.. 
[optional] - pod_log_options_follow (bool): Follow the log stream of the pod. Defaults to false. +optional.. [optional] - pod_log_options_previous (bool): Return previous terminated container logs. Defaults to false. +optional.. [optional] - pod_log_options_since_seconds (str): A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.. [optional] - pod_log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] - pod_log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] - pod_log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - pod_log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] - pod_log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. [optional] - pod_log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. 
This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfSensorLogEntry - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + name, + trigger_name, + grep, + pod_log_options_container, + pod_log_options_follow, + pod_log_options_previous, + pod_log_options_since_seconds, + pod_log_options_since_time_seconds, + pod_log_options_since_time_nanos, + pod_log_options_timestamps, + pod_log_options_tail_lines, + pod_log_options_limit_bytes, + pod_log_options_insecure_skip_tls_verify_backend, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if name is not None: + + _query_params.append(('name', name)) + + if trigger_name is not None: + + _query_params.append(('triggerName', trigger_name)) + + if grep is not None: + + _query_params.append(('grep', grep)) + + if pod_log_options_container is not None: + + _query_params.append(('podLogOptions.container', pod_log_options_container)) + + if pod_log_options_follow is not None: + + _query_params.append(('podLogOptions.follow', pod_log_options_follow)) + + if pod_log_options_previous is not None: + + _query_params.append(('podLogOptions.previous', pod_log_options_previous)) + + if pod_log_options_since_seconds is not None: + + _query_params.append(('podLogOptions.sinceSeconds', pod_log_options_since_seconds)) + + if pod_log_options_since_time_seconds is not None: + + _query_params.append(('podLogOptions.sinceTime.seconds', pod_log_options_since_time_seconds)) + + if pod_log_options_since_time_nanos is not None: + + _query_params.append(('podLogOptions.sinceTime.nanos', pod_log_options_since_time_nanos)) + + 
if pod_log_options_timestamps is not None: + + _query_params.append(('podLogOptions.timestamps', pod_log_options_timestamps)) + + if pod_log_options_tail_lines is not None: + + _query_params.append(('podLogOptions.tailLines', pod_log_options_tail_lines)) + + if pod_log_options_limit_bytes is not None: + + _query_params.append(('podLogOptions.limitBytes', pod_log_options_limit_bytes)) + + if pod_log_options_insecure_skip_tls_verify_backend is not None: + + _query_params.append(('podLogOptions.insecureSkipTLSVerifyBackend', pod_log_options_insecure_skip_tls_verify_backend)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/stream/sensors/{namespace}/logs', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + + + + + @validate_call + def update_sensor( + self, + namespace: StrictStr, + name: StrictStr, + body: SensorUpdateSensorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> 
IoArgoprojEventsV1alpha1Sensor: + """update_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: SensorUpdateSensorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_sensor_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_sensor_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: SensorUpdateSensorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojEventsV1alpha1Sensor]: + """update_sensor + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: SensorUpdateSensorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_sensor_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.sensors_logs_endpoint.call_with_http_info(**kwargs) - def update_sensor( + + @validate_call + def update_sensor_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: SensorUpdateSensorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update_sensor + + + :param namespace: 
(required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: SensorUpdateSensorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_sensor_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojEventsV1alpha1Sensor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_sensor_serialize( self, namespace, name, body, - **kwargs - ): - """update_sensor # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_sensor(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (SensorUpdateSensorRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojEventsV1alpha1Sensor - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/sensors/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + 
_host=_host, + _request_auth=_request_auth ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + + + + + @validate_call + def watch_sensors( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfSensorSensorWatchEvent: + """watch_sensors + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
+ :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._watch_sensors_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfSensorSensorWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.update_sensor_endpoint.call_with_http_info(**kwargs) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data - def watch_sensors( + + @validate_call + def watch_sensors_with_http_info( self, - namespace, - **kwargs - ): - """watch_sensors # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.watch_sensors(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. 
[optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfSensorSensorWatchEvent - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfSensorSensorWatchEvent]: + """watch_sensors + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. 
+optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._watch_sensors_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfSensorSensorWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None + + + @validate_call + def watch_sensors_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. 
+optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """watch_sensors + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._watch_sensors_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfSensorSensorWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) - 
kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True + return response_data.response + + + def _watch_sensors_serialize( + self, + namespace, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + 
_query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/stream/sensors/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.watch_sensors_endpoint.call_with_http_info(**kwargs) + diff --git a/sdks/python/client/argo_workflows/api/workflow_service_api.py b/sdks/python/client/argo_workflows/api/workflow_service_api.py index 838e3545d1b9..b673b2d72ca5 100644 --- a/sdks/python/client/argo_workflows/api/workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/workflow_service_api.py @@ -1,2838 +1,5920 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_lint_request import IoArgoprojWorkflowV1alpha1WorkflowLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_stop_request import 
IoArgoprojWorkflowV1alpha1WorkflowStopRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest -from argo_workflows.model.stream_result_of_event import StreamResultOfEvent -from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry -from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent - - -class WorkflowServiceApi(object): +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_lint_request import IoArgoprojWorkflowV1alpha1WorkflowLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_stop_request import IoArgoprojWorkflowV1alpha1WorkflowStopRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest +from argo_workflows.models.stream_result_of_event import StreamResultOfEvent +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent + +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType + + +class WorkflowServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. 
""" - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.create_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}', - 'operation_id': 'create_workflow', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowCreateRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.delete_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}', - 'operation_id': 'delete_workflow', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'delete_options_grace_period_seconds', - 'delete_options_preconditions_uid', - 'delete_options_preconditions_resource_version', - 'delete_options_orphan_dependents', - 'delete_options_propagation_policy', - 'delete_options_dry_run', - 'force', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 
'delete_options_grace_period_seconds': - (str,), - 'delete_options_preconditions_uid': - (str,), - 'delete_options_preconditions_resource_version': - (str,), - 'delete_options_orphan_dependents': - (bool,), - 'delete_options_propagation_policy': - (str,), - 'delete_options_dry_run': - ([str],), - 'force': - (bool,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'delete_options_grace_period_seconds': 'deleteOptions.gracePeriodSeconds', - 'delete_options_preconditions_uid': 'deleteOptions.preconditions.uid', - 'delete_options_preconditions_resource_version': 'deleteOptions.preconditions.resourceVersion', - 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', - 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', - 'delete_options_dry_run': 'deleteOptions.dryRun', - 'force': 'force', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'delete_options_grace_period_seconds': 'query', - 'delete_options_preconditions_uid': 'query', - 'delete_options_preconditions_resource_version': 'query', - 'delete_options_orphan_dependents': 'query', - 'delete_options_propagation_policy': 'query', - 'delete_options_dry_run': 'query', - 'force': 'query', - }, - 'collection_format_map': { - 'delete_options_dry_run': 'multi', - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.get_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}', - 'operation_id': 'get_workflow', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'get_options_resource_version', - 'fields', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 
'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'get_options_resource_version': - (str,), - 'fields': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'get_options_resource_version': 'getOptions.resourceVersion', - 'fields': 'fields', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'get_options_resource_version': 'query', - 'fields': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.lint_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/lint', - 'operation_id': 'lint_workflow', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowLintRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.list_workflows_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}', - 'operation_id': 'list_workflows', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 
'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - 'fields', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - 'fields': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - 'fields': 'fields', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - 'fields': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.pod_logs_endpoint 
= _Endpoint( - settings={ - 'response_type': (StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/{podName}/log', - 'operation_id': 'pod_logs', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'pod_name', - 'log_options_container', - 'log_options_follow', - 'log_options_previous', - 'log_options_since_seconds', - 'log_options_since_time_seconds', - 'log_options_since_time_nanos', - 'log_options_timestamps', - 'log_options_tail_lines', - 'log_options_limit_bytes', - 'log_options_insecure_skip_tls_verify_backend', - 'grep', - 'selector', - ], - 'required': [ - 'namespace', - 'name', - 'pod_name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'pod_name': - (str,), - 'log_options_container': - (str,), - 'log_options_follow': - (bool,), - 'log_options_previous': - (bool,), - 'log_options_since_seconds': - (str,), - 'log_options_since_time_seconds': - (str,), - 'log_options_since_time_nanos': - (int,), - 'log_options_timestamps': - (bool,), - 'log_options_tail_lines': - (str,), - 'log_options_limit_bytes': - (str,), - 'log_options_insecure_skip_tls_verify_backend': - (bool,), - 'grep': - (str,), - 'selector': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'pod_name': 'podName', - 'log_options_container': 'logOptions.container', - 'log_options_follow': 'logOptions.follow', - 'log_options_previous': 'logOptions.previous', - 'log_options_since_seconds': 'logOptions.sinceSeconds', - 'log_options_since_time_seconds': 'logOptions.sinceTime.seconds', - 'log_options_since_time_nanos': 'logOptions.sinceTime.nanos', - 'log_options_timestamps': 'logOptions.timestamps', - 'log_options_tail_lines': 'logOptions.tailLines', - 'log_options_limit_bytes': 
'logOptions.limitBytes', - 'log_options_insecure_skip_tls_verify_backend': 'logOptions.insecureSkipTLSVerifyBackend', - 'grep': 'grep', - 'selector': 'selector', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'pod_name': 'path', - 'log_options_container': 'query', - 'log_options_follow': 'query', - 'log_options_previous': 'query', - 'log_options_since_seconds': 'query', - 'log_options_since_time_seconds': 'query', - 'log_options_since_time_nanos': 'query', - 'log_options_timestamps': 'query', - 'log_options_tail_lines': 'query', - 'log_options_limit_bytes': 'query', - 'log_options_insecure_skip_tls_verify_backend': 'query', - 'grep': 'query', - 'selector': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.resubmit_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/resubmit', - 'operation_id': 'resubmit_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.resume_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ 
- 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/resume', - 'operation_id': 'resume_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowResumeRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.retry_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/retry', - 'operation_id': 'retry_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowRetryRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.set_workflow_endpoint = _Endpoint( - settings={ - 'response_type': 
(IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/set', - 'operation_id': 'set_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowSetRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.stop_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/stop', - 'operation_id': 'stop_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowStopRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.submit_workflow_endpoint = _Endpoint( - settings={ - 
'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/submit', - 'operation_id': 'submit_workflow', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.suspend_workflow_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/suspend', - 'operation_id': 'suspend_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.terminate_workflow_endpoint = _Endpoint( - settings={ - 'response_type': 
(IoArgoprojWorkflowV1alpha1Workflow,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/terminate', - 'operation_id': 'terminate_workflow', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.watch_events_endpoint = _Endpoint( - settings={ - 'response_type': (StreamResultOfEvent,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/stream/events/{namespace}', - 'operation_id': 'watch_events', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 
'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.watch_workflows_endpoint = _Endpoint( - settings={ - 'response_type': (StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflow-events/{namespace}', - 'operation_id': 'watch_workflows', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - 'fields', 
- ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - 'fields': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', - 'list_options_continue': 'listOptions.continue', - 'fields': 'fields', - }, - 'location_map': { - 'namespace': 'path', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - 'fields': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.workflow_logs_endpoint = _Endpoint( - settings={ - 'response_type': (StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry,), - 'auth': [ - 'BearerToken' - ], 
- 'endpoint_path': '/api/v1/workflows/{namespace}/{name}/log', - 'operation_id': 'workflow_logs', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'pod_name', - 'log_options_container', - 'log_options_follow', - 'log_options_previous', - 'log_options_since_seconds', - 'log_options_since_time_seconds', - 'log_options_since_time_nanos', - 'log_options_timestamps', - 'log_options_tail_lines', - 'log_options_limit_bytes', - 'log_options_insecure_skip_tls_verify_backend', - 'grep', - 'selector', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'pod_name': - (str,), - 'log_options_container': - (str,), - 'log_options_follow': - (bool,), - 'log_options_previous': - (bool,), - 'log_options_since_seconds': - (str,), - 'log_options_since_time_seconds': - (str,), - 'log_options_since_time_nanos': - (int,), - 'log_options_timestamps': - (bool,), - 'log_options_tail_lines': - (str,), - 'log_options_limit_bytes': - (str,), - 'log_options_insecure_skip_tls_verify_backend': - (bool,), - 'grep': - (str,), - 'selector': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'pod_name': 'podName', - 'log_options_container': 'logOptions.container', - 'log_options_follow': 'logOptions.follow', - 'log_options_previous': 'logOptions.previous', - 'log_options_since_seconds': 'logOptions.sinceSeconds', - 'log_options_since_time_seconds': 'logOptions.sinceTime.seconds', - 'log_options_since_time_nanos': 'logOptions.sinceTime.nanos', - 'log_options_timestamps': 'logOptions.timestamps', - 'log_options_tail_lines': 'logOptions.tailLines', - 'log_options_limit_bytes': 'logOptions.limitBytes', - 'log_options_insecure_skip_tls_verify_backend': 'logOptions.insecureSkipTLSVerifyBackend', - 'grep': 'grep', - 'selector': 
'selector', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'pod_name': 'query', - 'log_options_container': 'query', - 'log_options_follow': 'query', - 'log_options_previous': 'query', - 'log_options_since_seconds': 'query', - 'log_options_since_time_seconds': 'query', - 'log_options_since_time_nanos': 'query', - 'log_options_timestamps': 'query', - 'log_options_tail_lines': 'query', - 'log_options_limit_bytes': 'query', - 'log_options_insecure_skip_tls_verify_backend': 'query', - 'grep': 'query', - 'selector': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) + + @validate_call def create_workflow( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """create_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_workflow_with_http_info( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """create_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_workflow_without_preload_content( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + 
:type body: IoArgoprojWorkflowV1alpha1WorkflowCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_workflow_serialize( self, namespace, body, - **kwargs - ): - """create_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_workflow(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (IoArgoprojWorkflowV1alpha1WorkflowCreateRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. 
- _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.create_workflow_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 
'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/workflows/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def delete_workflow( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.")] = None, + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """delete_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param force: + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_workflow_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. 
+optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """delete_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param force: + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_workflow_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.")] = None, + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. 
If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param force: + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_workflow_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_workflow_serialize( self, namespace, name, - **kwargs - ): - """delete_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_workflow(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - delete_options_grace_period_seconds (str): The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.. [optional] - delete_options_preconditions_uid (str): Specifies the target UID. +optional.. [optional] - delete_options_preconditions_resource_version (str): Specifies the target ResourceVersion +optional.. [optional] - delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] - delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] - delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.. 
[optional] - force (bool): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.delete_workflow_endpoint.call_with_http_info(**kwargs) + delete_options_grace_period_seconds, + delete_options_preconditions_uid, + delete_options_preconditions_resource_version, + delete_options_orphan_dependents, + delete_options_propagation_policy, + delete_options_dry_run, + force, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'deleteOptions.dryRun': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if delete_options_grace_period_seconds is not None: + + _query_params.append(('deleteOptions.gracePeriodSeconds', delete_options_grace_period_seconds)) + + if delete_options_preconditions_uid is not None: + + _query_params.append(('deleteOptions.preconditions.uid', delete_options_preconditions_uid)) + + if 
delete_options_preconditions_resource_version is not None: + + _query_params.append(('deleteOptions.preconditions.resourceVersion', delete_options_preconditions_resource_version)) + + if delete_options_orphan_dependents is not None: + + _query_params.append(('deleteOptions.orphanDependents', delete_options_orphan_dependents)) + + if delete_options_propagation_policy is not None: + + _query_params.append(('deleteOptions.propagationPolicy', delete_options_propagation_policy)) + + if delete_options_dry_run is not None: + + _query_params.append(('deleteOptions.dryRun', delete_options_dry_run)) + + if force is not None: + + _query_params.append(('force', force)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/api/v1/workflows/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_workflow( self, - namespace, - name, - **kwargs - ): - """get_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_workflow(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - get_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - fields (str): Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\".. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.get_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + fields: Annotated[Optional[StrictStr], Field(description="Fields to be included or excluded in the response. e.g. 
\"spec,status.phase\", \"-status.nodes\".")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """get_workflow - def lint_workflow( + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param fields: Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\". + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_workflow_with_http_info( self, - namespace, - body, - **kwargs - ): - """lint_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.lint_workflow(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (IoArgoprojWorkflowV1alpha1WorkflowLintRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.lint_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + fields: Annotated[Optional[StrictStr], Field(description="Fields to be included or excluded in the response. e.g. 
\"spec,status.phase\", \"-status.nodes\".")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """get_workflow - def list_workflows( + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param fields: Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\". + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_workflow_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_workflow_without_preload_content( self, - namespace, - **kwargs - ): - """list_workflows # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_workflows(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. 
If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - fields (str): Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\".. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. 
- Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowList - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.list_workflows_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + fields: Annotated[Optional[StrictStr], Field(description="Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\".")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_workflow - def pod_logs( + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param fields: Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\". + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_workflow_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_workflow_serialize( self, namespace, name, - pod_name, - **kwargs - ): - """DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. 
# noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.pod_logs(namespace, name, pod_name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - pod_name (str): - - Keyword Args: - log_options_container (str): The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.. [optional] - log_options_follow (bool): Follow the log stream of the pod. Defaults to false. +optional.. [optional] - log_options_previous (bool): Return previous terminated container logs. Defaults to false. +optional.. [optional] - log_options_since_seconds (str): A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.. [optional] - log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] - log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] - log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. 
[optional] - log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. [optional] - log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] - grep (str): [optional] - selector (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['pod_name'] = \ - pod_name - return self.pod_logs_endpoint.call_with_http_info(**kwargs) + get_options_resource_version, + fields, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - def resubmit_workflow( + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if get_options_resource_version is not None: + 
+ _query_params.append(('getOptions.resourceVersion', get_options_resource_version)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflows/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def lint_workflow( self, - namespace, - name, - body, - **kwargs - ): - """resubmit_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.resubmit_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. 
- _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.resubmit_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: 
Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """lint_workflow - def resume_workflow( + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._lint_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lint_workflow_with_http_info( self, - namespace, - name, - body, - **kwargs - ): - """resume_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.resume_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1WorkflowResumeRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.resume_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """lint_workflow - def retry_workflow( + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowLintRequest + :param 
_request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lint_workflow_without_preload_content( self, - namespace, - name, - body, - **kwargs - ): - """retry_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.retry_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1WorkflowRetryRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.retry_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """lint_workflow - def set_workflow( + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lint_workflow_serialize( self, namespace, - name, body, - **kwargs - ): - """set_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.set_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1WorkflowSetRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. 
- _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.set_workflow_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - def stop_workflow( + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] 
= {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/workflows/{namespace}/lint', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def list_workflows( self, - namespace, - name, - body, - **kwargs - ): - """stop_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.stop_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1WorkflowStopRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. 
- _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.stop_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + fields: Annotated[Optional[StrictStr], Field(description="Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\".")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowList: + """list_workflows - def submit_workflow( + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
+ :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param fields: Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\". + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_workflows_with_http_info( self, - namespace, - 
body, - **kwargs - ): - """submit_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.submit_workflow(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.submit_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + fields: Annotated[Optional[StrictStr], Field(description="Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\".")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowList]: + """list_workflows - def suspend_workflow( + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
+ :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param fields: Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\". + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_workflows_without_preload_content( self, - namespace, - 
name, - body, - **kwargs - ): - """suspend_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.suspend_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.suspend_workflow_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + fields: Annotated[Optional[StrictStr], Field(description="Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\".")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_workflows - def terminate_workflow( + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
+ :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param fields: Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\". + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_workflows_serialize( self, namespace, - name, - body, - **kwargs - ): - """terminate_workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.terminate_workflow(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - body (IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1Workflow - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.terminate_workflow_endpoint.call_with_http_info(**kwargs) + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + fields, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - def watch_events( + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + 
_query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflows/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def pod_logs( self, - namespace, - **kwargs - ): - """watch_events # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.watch_events(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. 
[optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. 
- Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfEvent - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.watch_events_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + name: StrictStr, + pod_name: StrictStr, + log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. 
If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. 
If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + grep: Optional[StrictStr] = None, + selector: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry: + """DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. - def watch_workflows( + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param pod_name: (required) + :type pod_name: str + :param log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type log_options_container: str + :param log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type log_options_follow: bool + :param log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type log_options_previous: bool + :param log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type log_options_since_seconds: str + :param log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type log_options_since_time_seconds: str + :param log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type log_options_since_time_nanos: int + :param log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type log_options_timestamps: bool + :param log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type log_options_tail_lines: str + :param log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type log_options_limit_bytes: str + :param log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. 
+ :type log_options_insecure_skip_tls_verify_backend: bool + :param grep: + :type grep: str + :param selector: + :type selector: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._pod_logs_serialize( + namespace=namespace, + name=name, + pod_name=pod_name, + log_options_container=log_options_container, + log_options_follow=log_options_follow, + log_options_previous=log_options_previous, + log_options_since_seconds=log_options_since_seconds, + log_options_since_time_seconds=log_options_since_time_seconds, + log_options_since_time_nanos=log_options_since_time_nanos, + log_options_timestamps=log_options_timestamps, + log_options_tail_lines=log_options_tail_lines, + log_options_limit_bytes=log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, + grep=grep, + selector=selector, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def pod_logs_with_http_info( self, - namespace, - **kwargs - ): - """watch_workflows # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.watch_workflows(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. 
[optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. [optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. [optional] - fields (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.watch_workflows_endpoint.call_with_http_info(**kwargs) + namespace: StrictStr, + name: StrictStr, + pod_name: StrictStr, + log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). 
+optional.")] = None, + grep: Optional[StrictStr] = None, + selector: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry]: + """DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. - def workflow_logs( + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param pod_name: (required) + :type pod_name: str + :param log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type log_options_container: str + :param log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type log_options_follow: bool + :param log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type log_options_previous: bool + :param log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type log_options_since_seconds: str + :param log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type log_options_since_time_seconds: str + :param log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. 
Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type log_options_since_time_nanos: int + :param log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type log_options_timestamps: bool + :param log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type log_options_tail_lines: str + :param log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type log_options_limit_bytes: str + :param log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type log_options_insecure_skip_tls_verify_backend: bool + :param grep: + :type grep: str + :param selector: + :type selector: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._pod_logs_serialize( + namespace=namespace, + name=name, + pod_name=pod_name, + log_options_container=log_options_container, + log_options_follow=log_options_follow, + log_options_previous=log_options_previous, + log_options_since_seconds=log_options_since_seconds, + log_options_since_time_seconds=log_options_since_time_seconds, + log_options_since_time_nanos=log_options_since_time_nanos, + log_options_timestamps=log_options_timestamps, + log_options_tail_lines=log_options_tail_lines, + log_options_limit_bytes=log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, + grep=grep, + selector=selector, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def pod_logs_without_preload_content( 
+ self, + namespace: StrictStr, + name: StrictStr, + pod_name: StrictStr, + log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. 
If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + grep: Optional[StrictStr] = None, + selector: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param pod_name: (required) + :type pod_name: str + :param log_options_container: The container for which to stream logs. 
Defaults to only container if there is one container in the pod. +optional. + :type log_options_container: str + :param log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type log_options_follow: bool + :param log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type log_options_previous: bool + :param log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type log_options_since_seconds: str + :param log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type log_options_since_time_seconds: str + :param log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type log_options_since_time_nanos: int + :param log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type log_options_timestamps: bool + :param log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type log_options_tail_lines: str + :param log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type log_options_limit_bytes: str + :param log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type log_options_insecure_skip_tls_verify_backend: bool + :param grep: + :type grep: str + :param selector: + :type selector: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._pod_logs_serialize( + namespace=namespace, + name=name, + pod_name=pod_name, + log_options_container=log_options_container, + log_options_follow=log_options_follow, + log_options_previous=log_options_previous, + log_options_since_seconds=log_options_since_seconds, + log_options_since_time_seconds=log_options_since_time_seconds, + log_options_since_time_nanos=log_options_since_time_nanos, + log_options_timestamps=log_options_timestamps, + log_options_tail_lines=log_options_tail_lines, + log_options_limit_bytes=log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, + grep=grep, + selector=selector, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _pod_logs_serialize( self, namespace, name, - **kwargs - ): - """workflow_logs # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.workflow_logs(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - pod_name (str): [optional] - log_options_container (str): The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.. [optional] - log_options_follow (bool): Follow the log stream of the pod. Defaults to false. +optional.. [optional] - log_options_previous (bool): Return previous terminated container logs. Defaults to false. +optional.. [optional] - log_options_since_seconds (str): A relative time in seconds before the current time from which to show logs. 
If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.. [optional] - log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] - log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] - log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] - log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. [optional] - log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. 
an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] - grep (str): [optional] - selector (str): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.workflow_logs_endpoint.call_with_http_info(**kwargs) + pod_name, + log_options_container, + log_options_follow, + log_options_previous, + log_options_since_seconds, + log_options_since_time_seconds, + log_options_since_time_nanos, + log_options_timestamps, + log_options_tail_lines, + log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend, + grep, + selector, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + if pod_name is not None: + _path_params['podName'] = pod_name + # process the query parameters + if log_options_container is not None: + + _query_params.append(('logOptions.container', log_options_container)) + + if log_options_follow is not None: + + _query_params.append(('logOptions.follow', log_options_follow)) + + if 
log_options_previous is not None: + + _query_params.append(('logOptions.previous', log_options_previous)) + + if log_options_since_seconds is not None: + + _query_params.append(('logOptions.sinceSeconds', log_options_since_seconds)) + + if log_options_since_time_seconds is not None: + + _query_params.append(('logOptions.sinceTime.seconds', log_options_since_time_seconds)) + + if log_options_since_time_nanos is not None: + + _query_params.append(('logOptions.sinceTime.nanos', log_options_since_time_nanos)) + + if log_options_timestamps is not None: + + _query_params.append(('logOptions.timestamps', log_options_timestamps)) + + if log_options_tail_lines is not None: + + _query_params.append(('logOptions.tailLines', log_options_tail_lines)) + + if log_options_limit_bytes is not None: + + _query_params.append(('logOptions.limitBytes', log_options_limit_bytes)) + + if log_options_insecure_skip_tls_verify_backend is not None: + + _query_params.append(('logOptions.insecureSkipTLSVerifyBackend', log_options_insecure_skip_tls_verify_backend)) + + if grep is not None: + + _query_params.append(('grep', grep)) + + if selector is not None: + + _query_params.append(('selector', selector)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflows/{namespace}/{name}/{podName}/log', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def resubmit_workflow( + self, + namespace: 
StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """resubmit_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._resubmit_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def resubmit_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """resubmit_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resubmit_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def resubmit_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """resubmit_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resubmit_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _resubmit_workflow_serialize( + self, + namespace, + name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # 
process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflows/{namespace}/{name}/resubmit', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def resume_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowResumeRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """resume_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowResumeRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resume_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def resume_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowResumeRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> 
ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """resume_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowResumeRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._resume_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def resume_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowResumeRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """resume_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowResumeRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resume_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _resume_workflow_serialize( + self, + namespace, + name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + 
self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflows/{namespace}/{name}/resume', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def retry_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowRetryRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """retry_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowRetryRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._retry_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def retry_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowRetryRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """retry_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowRetryRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._retry_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def retry_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowRetryRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + 
"""retry_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowRetryRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._retry_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _retry_workflow_serialize( + self, + namespace, + name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflows/{namespace}/{name}/retry', + path_params=_path_params, + query_params=_query_params, + 
header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSetRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """set_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSetRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSetRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """set_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSetRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSetRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """set_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSetRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_workflow_serialize( + self, + namespace, + name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not 
None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflows/{namespace}/{name}/set', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def stop_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowStopRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """stop_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowStopRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._stop_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def stop_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowStopRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """stop_workflow + + + :param namespace: (required) + 
:type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowStopRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._stop_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def stop_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowStopRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """stop_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowStopRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._stop_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _stop_workflow_serialize( + self, + namespace, + name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + 
self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflows/{namespace}/{name}/stop', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def submit_workflow( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """submit_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._submit_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def submit_workflow_with_http_info( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """submit_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._submit_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def submit_workflow_without_preload_content( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """submit_workflow + + + :param namespace: (required) + :type namespace: str + :param body: (required) + 
:type body: IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._submit_workflow_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _submit_workflow_serialize( + self, + namespace, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/workflows/{namespace}/submit', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + 
files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def suspend_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """suspend_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._suspend_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def suspend_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """suspend_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suspend_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def suspend_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """suspend_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suspend_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _suspend_workflow_serialize( + self, + namespace, + name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process 
the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflows/{namespace}/{name}/suspend', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def terminate_workflow( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1Workflow: + """terminate_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._terminate_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def terminate_workflow_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> 
ApiResponse[IoArgoprojWorkflowV1alpha1Workflow]: + """terminate_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._terminate_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def terminate_workflow_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """terminate_workflow + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._terminate_workflow_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _terminate_workflow_serialize( + self, + namespace, + name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( 
+ self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflows/{namespace}/{name}/terminate', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def watch_events( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfEvent: + """watch_events + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._watch_events_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def watch_events_with_http_info( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. 
+optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfEvent]: + """watch_events + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._watch_events_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + 
response_types_map=_response_types_map, + ) + + + @validate_call + def watch_events_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """watch_events + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
+ :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._watch_events_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _watch_events_serialize( + self, + namespace, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + 
_query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/stream/events/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def watch_workflows( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. 
+optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
+optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + fields: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent: + """watch_workflows + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. 
Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param fields: + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._watch_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def watch_workflows_with_http_info( + self, + namespace: StrictStr, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + fields: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent]: + """watch_workflows + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param fields: + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._watch_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def watch_workflows_without_preload_content( + self, + namespace: StrictStr, + list_options_label_selector: 
Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
+optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + fields: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """watch_workflows + + + :param namespace: (required) + :type namespace: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. 
+ :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param fields: + :type fields: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._watch_workflows_serialize( + namespace=namespace, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + fields=fields, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _watch_workflows_serialize( + self, + namespace, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + fields, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', 
list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + _query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflow-events/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def workflow_logs( + self, + namespace: StrictStr, + name: StrictStr, + pod_name: Optional[StrictStr] = None, + 
log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. +optional.")] = None, + log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. 
If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.")] = None, + log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + grep: Optional[StrictStr] = None, + selector: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry: + """workflow_logs + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param pod_name: + :type pod_name: str + :param log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. 
+ :type log_options_container: str + :param log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type log_options_follow: bool + :param log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type log_options_previous: bool + :param log_options_since_seconds: A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type log_options_since_seconds: str + :param log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type log_options_since_time_seconds: str + :param log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type log_options_since_time_nanos: int + :param log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type log_options_timestamps: bool + :param log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type log_options_tail_lines: str + :param log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
+ :type log_options_limit_bytes: str + :param log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type log_options_insecure_skip_tls_verify_backend: bool + :param grep: + :type grep: str + :param selector: + :type selector: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_logs_serialize( + namespace=namespace, + name=name, + pod_name=pod_name, + log_options_container=log_options_container, + log_options_follow=log_options_follow, + log_options_previous=log_options_previous, + log_options_since_seconds=log_options_since_seconds, + log_options_since_time_seconds=log_options_since_time_seconds, + log_options_since_time_nanos=log_options_since_time_nanos, + log_options_timestamps=log_options_timestamps, + log_options_tail_lines=log_options_tail_lines, + log_options_limit_bytes=log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, + grep=grep, + selector=selector, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def workflow_logs_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + pod_name: Optional[StrictStr] = None, + log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. 
+optional.")] = None, + log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. 
+optional.")] = None, + log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + grep: Optional[StrictStr] = None, + selector: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry]: + """workflow_logs + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param pod_name: + :type pod_name: str + :param log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type log_options_container: str + :param log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type log_options_follow: bool + :param log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type log_options_previous: bool + :param log_options_since_seconds: A relative time in seconds before the current time from which to show logs. 
If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type log_options_since_seconds: str + :param log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type log_options_since_time_seconds: str + :param log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type log_options_since_time_nanos: int + :param log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type log_options_timestamps: bool + :param log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type log_options_tail_lines: str + :param log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type log_options_limit_bytes: str + :param log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. 
If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type log_options_insecure_skip_tls_verify_backend: bool + :param grep: + :type grep: str + :param selector: + :type selector: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_logs_serialize( + namespace=namespace, + name=name, + pod_name=pod_name, + log_options_container=log_options_container, + log_options_follow=log_options_follow, + log_options_previous=log_options_previous, + log_options_since_seconds=log_options_since_seconds, + log_options_since_time_seconds=log_options_since_time_seconds, + log_options_since_time_nanos=log_options_since_time_nanos, + log_options_timestamps=log_options_timestamps, + log_options_tail_lines=log_options_tail_lines, + log_options_limit_bytes=log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, + grep=grep, + selector=selector, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def workflow_logs_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + pod_name: Optional[StrictStr] = None, + log_options_container: Annotated[Optional[StrictStr], Field(description="The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional.")] = None, + log_options_follow: Annotated[Optional[StrictBool], Field(description="Follow the log stream of the pod. Defaults to false. +optional.")] = None, + log_options_previous: Annotated[Optional[StrictBool], Field(description="Return previous terminated container logs. Defaults to false. 
+optional.")] = None, + log_options_since_seconds: Annotated[Optional[StrictStr], Field(description="A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional.")] = None, + log_options_since_time_seconds: Annotated[Optional[StrictStr], Field(description="Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.")] = None, + log_options_since_time_nanos: Annotated[Optional[StrictInt], Field(description="Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.")] = None, + log_options_timestamps: Annotated[Optional[StrictBool], Field(description="If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.")] = None, + log_options_tail_lines: Annotated[Optional[StrictStr], Field(description="If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.")] = None, + log_options_limit_bytes: Annotated[Optional[StrictStr], Field(description="If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. 
+optional.")] = None, + log_options_insecure_skip_tls_verify_backend: Annotated[Optional[StrictBool], Field(description="insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.")] = None, + grep: Optional[StrictStr] = None, + selector: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """workflow_logs + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param pod_name: + :type pod_name: str + :param log_options_container: The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. + :type log_options_container: str + :param log_options_follow: Follow the log stream of the pod. Defaults to false. +optional. + :type log_options_follow: bool + :param log_options_previous: Return previous terminated container logs. Defaults to false. +optional. + :type log_options_previous: bool + :param log_options_since_seconds: A relative time in seconds before the current time from which to show logs. 
If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. + :type log_options_since_seconds: str + :param log_options_since_time_seconds: Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + :type log_options_since_time_seconds: str + :param log_options_since_time_nanos: Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. + :type log_options_since_time_nanos: int + :param log_options_timestamps: If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. + :type log_options_timestamps: bool + :param log_options_tail_lines: If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + :type log_options_tail_lines: str + :param log_options_limit_bytes: If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. + :type log_options_limit_bytes: str + :param log_options_insecure_skip_tls_verify_backend: insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. 
If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + :type log_options_insecure_skip_tls_verify_backend: bool + :param grep: + :type grep: str + :param selector: + :type selector: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._workflow_logs_serialize( + namespace=namespace, + name=name, + pod_name=pod_name, + log_options_container=log_options_container, + log_options_follow=log_options_follow, + log_options_previous=log_options_previous, + log_options_since_seconds=log_options_since_seconds, + log_options_since_time_seconds=log_options_since_time_seconds, + log_options_since_time_nanos=log_options_since_time_nanos, + log_options_timestamps=log_options_timestamps, + log_options_tail_lines=log_options_tail_lines, + log_options_limit_bytes=log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, + grep=grep, + selector=selector, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _workflow_logs_serialize( + self, + namespace, + name, + pod_name, + log_options_container, + log_options_follow, + log_options_previous, + log_options_since_seconds, + log_options_since_time_seconds, + log_options_since_time_nanos, + log_options_timestamps, + log_options_tail_lines, + log_options_limit_bytes, + log_options_insecure_skip_tls_verify_backend, + grep, + selector, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + 
_path_params['name'] = name + # process the query parameters + if pod_name is not None: + + _query_params.append(('podName', pod_name)) + + if log_options_container is not None: + + _query_params.append(('logOptions.container', log_options_container)) + + if log_options_follow is not None: + + _query_params.append(('logOptions.follow', log_options_follow)) + + if log_options_previous is not None: + + _query_params.append(('logOptions.previous', log_options_previous)) + + if log_options_since_seconds is not None: + + _query_params.append(('logOptions.sinceSeconds', log_options_since_seconds)) + + if log_options_since_time_seconds is not None: + + _query_params.append(('logOptions.sinceTime.seconds', log_options_since_time_seconds)) + + if log_options_since_time_nanos is not None: + + _query_params.append(('logOptions.sinceTime.nanos', log_options_since_time_nanos)) + + if log_options_timestamps is not None: + + _query_params.append(('logOptions.timestamps', log_options_timestamps)) + + if log_options_tail_lines is not None: + + _query_params.append(('logOptions.tailLines', log_options_tail_lines)) + + if log_options_limit_bytes is not None: + + _query_params.append(('logOptions.limitBytes', log_options_limit_bytes)) + + if log_options_insecure_skip_tls_verify_backend is not None: + + _query_params.append(('logOptions.insecureSkipTLSVerifyBackend', log_options_insecure_skip_tls_verify_backend)) + + if grep is not None: + + _query_params.append(('grep', grep)) + + if selector is not None: + + _query_params.append(('selector', selector)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflows/{namespace}/{name}/log', 
+ path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + diff --git a/sdks/python/client/argo_workflows/api/workflow_template_service_api.py b/sdks/python/client/argo_workflows/api/workflow_template_service_api.py index 3b01cc57210a..753b4345248a 100644 --- a/sdks/python/client/argo_workflows/api/workflow_template_service_api.py +++ b/sdks/python/client/argo_workflows/api/workflow_template_service_api.py @@ -1,977 +1,1992 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated +from pydantic import Field, StrictBool, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_list import IoArgoprojWorkflowV1alpha1WorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.api_client import ApiClient, Endpoint as _Endpoint -from argo_workflows.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types -) -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_list import 
IoArgoprojWorkflowV1alpha1WorkflowTemplateList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest - - -class WorkflowTemplateServiceApi(object): +from argo_workflows.api_client import ApiClient, RequestSerialized +from argo_workflows.api_response import ApiResponse +from argo_workflows.rest import RESTResponseType + + +class WorkflowTemplateServiceApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - self.create_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflow-templates/{namespace}', - 'operation_id': 'create_workflow_template', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.delete_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': 
'/api/v1/workflow-templates/{namespace}/{name}', - 'operation_id': 'delete_workflow_template', - 'http_method': 'DELETE', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'delete_options_grace_period_seconds', - 'delete_options_preconditions_uid', - 'delete_options_preconditions_resource_version', - 'delete_options_orphan_dependents', - 'delete_options_propagation_policy', - 'delete_options_dry_run', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'delete_options_grace_period_seconds': - (str,), - 'delete_options_preconditions_uid': - (str,), - 'delete_options_preconditions_resource_version': - (str,), - 'delete_options_orphan_dependents': - (bool,), - 'delete_options_propagation_policy': - (str,), - 'delete_options_dry_run': - ([str],), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'delete_options_grace_period_seconds': 'deleteOptions.gracePeriodSeconds', - 'delete_options_preconditions_uid': 'deleteOptions.preconditions.uid', - 'delete_options_preconditions_resource_version': 'deleteOptions.preconditions.resourceVersion', - 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', - 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', - 'delete_options_dry_run': 'deleteOptions.dryRun', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'delete_options_grace_period_seconds': 'query', - 'delete_options_preconditions_uid': 'query', - 'delete_options_preconditions_resource_version': 'query', - 'delete_options_orphan_dependents': 'query', - 'delete_options_propagation_policy': 'query', - 'delete_options_dry_run': 'query', - }, - 'collection_format_map': { - 'delete_options_dry_run': 'multi', - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 
'content_type': [], - }, - api_client=api_client - ) - self.get_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflow-templates/{namespace}/{name}', - 'operation_id': 'get_workflow_template', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'get_options_resource_version', - ], - 'required': [ - 'namespace', - 'name', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'get_options_resource_version': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - 'get_options_resource_version': 'getOptions.resourceVersion', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'get_options_resource_version': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.lint_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflow-templates/{namespace}/lint', - 'operation_id': 'lint_workflow_template', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'body', - ], - 'required': [ - 'namespace', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - }, - 'location_map': { - 'namespace': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 
'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - self.list_workflow_templates_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplateList,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflow-templates/{namespace}', - 'operation_id': 'list_workflow_templates', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name_pattern', - 'list_options_label_selector', - 'list_options_field_selector', - 'list_options_watch', - 'list_options_allow_watch_bookmarks', - 'list_options_resource_version', - 'list_options_resource_version_match', - 'list_options_timeout_seconds', - 'list_options_limit', - 'list_options_continue', - ], - 'required': [ - 'namespace', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name_pattern': - (str,), - 'list_options_label_selector': - (str,), - 'list_options_field_selector': - (str,), - 'list_options_watch': - (bool,), - 'list_options_allow_watch_bookmarks': - (bool,), - 'list_options_resource_version': - (str,), - 'list_options_resource_version_match': - (str,), - 'list_options_timeout_seconds': - (str,), - 'list_options_limit': - (str,), - 'list_options_continue': - (str,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name_pattern': 'namePattern', - 'list_options_label_selector': 'listOptions.labelSelector', - 'list_options_field_selector': 'listOptions.fieldSelector', - 'list_options_watch': 'listOptions.watch', - 'list_options_allow_watch_bookmarks': 'listOptions.allowWatchBookmarks', - 'list_options_resource_version': 'listOptions.resourceVersion', - 'list_options_resource_version_match': 'listOptions.resourceVersionMatch', - 'list_options_timeout_seconds': 'listOptions.timeoutSeconds', - 'list_options_limit': 'listOptions.limit', 
- 'list_options_continue': 'listOptions.continue', - }, - 'location_map': { - 'namespace': 'path', - 'name_pattern': 'query', - 'list_options_label_selector': 'query', - 'list_options_field_selector': 'query', - 'list_options_watch': 'query', - 'list_options_allow_watch_bookmarks': 'query', - 'list_options_resource_version': 'query', - 'list_options_resource_version_match': 'query', - 'list_options_timeout_seconds': 'query', - 'list_options_limit': 'query', - 'list_options_continue': 'query', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], - }, - api_client=api_client - ) - self.update_workflow_template_endpoint = _Endpoint( - settings={ - 'response_type': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), - 'auth': [ - 'BearerToken' - ], - 'endpoint_path': '/api/v1/workflow-templates/{namespace}/{name}', - 'operation_id': 'update_workflow_template', - 'http_method': 'PUT', - 'servers': None, - }, - params_map={ - 'all': [ - 'namespace', - 'name', - 'body', - ], - 'required': [ - 'namespace', - 'name', - 'body', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'namespace': - (str,), - 'name': - (str,), - 'body': - (IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest,), - }, - 'attribute_map': { - 'namespace': 'namespace', - 'name': 'name', - }, - 'location_map': { - 'namespace': 'path', - 'name': 'path', - 'body': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) + + @validate_call def create_workflow_template( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, 
Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowTemplate: + """create_workflow_template + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_workflow_template_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_workflow_template_with_http_info( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowTemplate]: + """create_workflow_template + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_workflow_template_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_workflow_template_without_preload_content( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create_workflow_template + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_workflow_template_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_workflow_template_serialize( self, namespace, body, - **kwargs - ): - """create_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_workflow_template(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. 
- _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowTemplate - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.create_workflow_template_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = 
[ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/workflow-templates/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def delete_workflow_template( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """delete_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_workflow_template_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_workflow_template_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. 
+optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """delete_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_workflow_template_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_workflow_template_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + delete_options_grace_period_seconds: Annotated[Optional[StrictStr], Field(description="The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.")] = None, + delete_options_preconditions_uid: Annotated[Optional[StrictStr], Field(description="Specifies the target UID. +optional.")] = None, + delete_options_preconditions_resource_version: Annotated[Optional[StrictStr], Field(description="Specifies the target ResourceVersion +optional.")] = None, + delete_options_orphan_dependents: Annotated[Optional[StrictBool], Field(description="Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. 
Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.")] = None, + delete_options_propagation_policy: Annotated[Optional[StrictStr], Field(description="Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.")] = None, + delete_options_dry_run: Annotated[Optional[List[StrictStr]], Field(description="When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param delete_options_grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. 
Defaults to a per object value if not specified. zero means delete immediately. +optional. + :type delete_options_grace_period_seconds: str + :param delete_options_preconditions_uid: Specifies the target UID. +optional. + :type delete_options_preconditions_uid: str + :param delete_options_preconditions_resource_version: Specifies the target ResourceVersion +optional. + :type delete_options_preconditions_resource_version: str + :param delete_options_orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. + :type delete_options_orphan_dependents: bool + :param delete_options_propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. + :type delete_options_propagation_policy: str + :param delete_options_dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. + :type delete_options_dry_run: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_workflow_template_serialize( + namespace=namespace, + name=name, + delete_options_grace_period_seconds=delete_options_grace_period_seconds, + delete_options_preconditions_uid=delete_options_preconditions_uid, + delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, + delete_options_orphan_dependents=delete_options_orphan_dependents, + delete_options_propagation_policy=delete_options_propagation_policy, + delete_options_dry_run=delete_options_dry_run, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_workflow_template_serialize( self, namespace, name, - **kwargs - ): - """delete_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_workflow_template(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - delete_options_grace_period_seconds (str): The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional.. [optional] - delete_options_preconditions_uid (str): Specifies the target UID. +optional.. [optional] - delete_options_preconditions_resource_version (str): Specifies the target ResourceVersion +optional.. [optional] - delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] - delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] - delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - bool, date, datetime, dict, float, int, list, str, none_type - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.delete_workflow_template_endpoint.call_with_http_info(**kwargs) + delete_options_grace_period_seconds, + delete_options_preconditions_uid, + delete_options_preconditions_resource_version, + delete_options_orphan_dependents, + delete_options_propagation_policy, + delete_options_dry_run, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'deleteOptions.dryRun': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if delete_options_grace_period_seconds is not None: + + _query_params.append(('deleteOptions.gracePeriodSeconds', delete_options_grace_period_seconds)) + + if delete_options_preconditions_uid is not None: + + _query_params.append(('deleteOptions.preconditions.uid', delete_options_preconditions_uid)) + + if 
delete_options_preconditions_resource_version is not None: + + _query_params.append(('deleteOptions.preconditions.resourceVersion', delete_options_preconditions_resource_version)) + + if delete_options_orphan_dependents is not None: + + _query_params.append(('deleteOptions.orphanDependents', delete_options_orphan_dependents)) + + if delete_options_propagation_policy is not None: + + _query_params.append(('deleteOptions.propagationPolicy', delete_options_propagation_policy)) + + if delete_options_dry_run is not None: + + _query_params.append(('deleteOptions.dryRun', delete_options_dry_run)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/api/v1/workflow-templates/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def get_workflow_template( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowTemplate: + """get_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_template_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_workflow_template_with_http_info( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowTemplate]: + """get_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_workflow_template_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_workflow_template_without_preload_content( + self, + namespace: StrictStr, + name: StrictStr, + get_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: (required) + :type name: str + :param get_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type get_options_resource_version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_template_serialize( + namespace=namespace, + name=name, + get_options_resource_version=get_options_resource_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_workflow_template_serialize( self, namespace, name, - **kwargs - ): - """get_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_workflow_template(namespace, name, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): - - Keyword Args: - get_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. 
- _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowTemplate - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - return self.get_workflow_template_endpoint.call_with_http_info(**kwargs) + get_options_resource_version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + 
_path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + if get_options_resource_version is not None: + + _query_params.append(('getOptions.resourceVersion', get_options_resource_version)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflow-templates/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def lint_workflow_template( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowTemplate: + """lint_workflow_template + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_workflow_template_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lint_workflow_template_with_http_info( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowTemplate]: + """lint_workflow_template + + + :param 
namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._lint_workflow_template_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lint_workflow_template_without_preload_content( + self, + namespace: StrictStr, + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """lint_workflow_template + + + :param namespace: (required) + :type namespace: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lint_workflow_template_serialize( + namespace=namespace, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lint_workflow_template_serialize( self, namespace, body, - **kwargs - ): - """lint_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.lint_workflow_template(namespace, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - body (IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. 
- _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowTemplate - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['body'] = \ - body - return self.lint_workflow_template_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: 
Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/api/v1/workflow-templates/{namespace}/lint', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call def list_workflow_templates( + self, + namespace: StrictStr, + name_pattern: Optional[StrictStr] = None, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. 
+optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowTemplateList: + """list_workflow_templates + + + :param namespace: (required) + :type namespace: str + :param name_pattern: + :type name_pattern: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
+ :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_workflow_templates_serialize( + namespace=namespace, + name_pattern=name_pattern, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplateList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return 
self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_workflow_templates_with_http_info( + self, + namespace: StrictStr, + name_pattern: Optional[StrictStr] = None, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. 
It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowTemplateList]: + """list_workflow_templates + + + :param namespace: (required) + :type namespace: str + :param name_pattern: + :type name_pattern: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_workflow_templates_serialize( + namespace=namespace, + name_pattern=name_pattern, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplateList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_workflow_templates_without_preload_content( + self, + namespace: StrictStr, + name_pattern: 
Optional[StrictStr] = None, + list_options_label_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.")] = None, + list_options_field_selector: Annotated[Optional[StrictStr], Field(description="A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.")] = None, + list_options_watch: Annotated[Optional[StrictBool], Field(description="Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.")] = None, + list_options_allow_watch_bookmarks: Annotated[Optional[StrictBool], Field(description="allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.")] = None, + list_options_resource_version: Annotated[Optional[StrictStr], Field(description="resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_resource_version_match: Annotated[Optional[StrictStr], Field(description="resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional")] = None, + list_options_timeout_seconds: Annotated[Optional[StrictStr], Field(description="Timeout for the list/watch call. 
This limits the duration of the call, regardless of any activity or inactivity. +optional.")] = None, + list_options_limit: Annotated[Optional[StrictStr], Field(description="limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.")] = None, + list_options_continue: Annotated[Optional[StrictStr], Field(description="The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list_workflow_templates + + + :param namespace: (required) + :type namespace: str + :param name_pattern: + :type name_pattern: str + :param list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. + :type list_options_label_selector: str + :param list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. + :type list_options_field_selector: str + :param list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. 
Specify resourceVersion. +optional. + :type list_options_watch: bool + :param list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. + :type list_options_allow_watch_bookmarks: bool + :param list_options_resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version: str + :param list_options_resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional + :type list_options_resource_version_match: str + :param list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. + :type list_options_timeout_seconds: str + :param list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. + :type list_options_limit: str + :param list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. + :type list_options_continue: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_workflow_templates_serialize( + namespace=namespace, + name_pattern=name_pattern, + list_options_label_selector=list_options_label_selector, + list_options_field_selector=list_options_field_selector, + list_options_watch=list_options_watch, + list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, + list_options_resource_version=list_options_resource_version, + list_options_resource_version_match=list_options_resource_version_match, + list_options_timeout_seconds=list_options_timeout_seconds, + list_options_limit=list_options_limit, + list_options_continue=list_options_continue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplateList", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_workflow_templates_serialize( self, namespace, - **kwargs - ): - """list_workflow_templates # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_workflow_templates(namespace, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - - Keyword Args: - name_pattern (str): [optional] - list_options_label_selector (str): A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.. [optional] - list_options_field_selector (str): A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.. [optional] - list_options_watch (bool): Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.. 
[optional] - list_options_allow_watch_bookmarks (bool): allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional.. [optional] - list_options_resource_version (str): resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_resource_version_match (str): resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional. [optional] - list_options_timeout_seconds (str): Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.. [optional] - list_options_limit (str): limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. 
This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.. [optional] - list_options_continue (str): The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.. 
[optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowTemplateList - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - return self.list_workflow_templates_endpoint.call_with_http_info(**kwargs) + name_pattern, + list_options_label_selector, + list_options_field_selector, + list_options_watch, + list_options_allow_watch_bookmarks, + list_options_resource_version, + list_options_resource_version_match, + list_options_timeout_seconds, + list_options_limit, + list_options_continue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + # process the query parameters + if name_pattern is not None: + + _query_params.append(('namePattern', name_pattern)) + + if list_options_label_selector is not None: + + _query_params.append(('listOptions.labelSelector', list_options_label_selector)) + + if list_options_field_selector is not None: + + _query_params.append(('listOptions.fieldSelector', list_options_field_selector)) + + if list_options_watch is not None: + + 
_query_params.append(('listOptions.watch', list_options_watch)) + + if list_options_allow_watch_bookmarks is not None: + + _query_params.append(('listOptions.allowWatchBookmarks', list_options_allow_watch_bookmarks)) + + if list_options_resource_version is not None: + + _query_params.append(('listOptions.resourceVersion', list_options_resource_version)) + + if list_options_resource_version_match is not None: + + _query_params.append(('listOptions.resourceVersionMatch', list_options_resource_version_match)) + + if list_options_timeout_seconds is not None: + + _query_params.append(('listOptions.timeoutSeconds', list_options_timeout_seconds)) + + if list_options_limit is not None: + + _query_params.append(('listOptions.limit', list_options_limit)) + + if list_options_continue is not None: + + _query_params.append(('listOptions.continue', list_options_continue)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/api/v1/workflow-templates/{namespace}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def update_workflow_template( + self, + namespace: StrictStr, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, 
+ _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> IoArgoprojWorkflowV1alpha1WorkflowTemplate: + """update_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: DEPRECATED: This field is ignored. (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_workflow_template_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_workflow_template_with_http_info( + self, + namespace: StrictStr, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[IoArgoprojWorkflowV1alpha1WorkflowTemplate]: + """update_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: DEPRECATED: This field is ignored. (required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_workflow_template_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_workflow_template_without_preload_content( + self, + namespace: StrictStr, + name: Annotated[StrictStr, Field(description="DEPRECATED: This field is ignored.")], + body: IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update_workflow_template + + + :param namespace: (required) + :type namespace: str + :param name: DEPRECATED: This field is ignored. 
(required) + :type name: str + :param body: (required) + :type body: IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_workflow_template_serialize( + namespace=namespace, + name=name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "IoArgoprojWorkflowV1alpha1WorkflowTemplate", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_workflow_template_serialize( self, namespace, name, body, - **kwargs - ): - """update_workflow_template # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_workflow_template(namespace, name, body, async_req=True) - >>> result = thread.get() - - Args: - namespace (str): - name (str): DEPRECATED: This field is ignored. - body (IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IoArgoprojWorkflowV1alpha1WorkflowTemplate - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_spec_property_naming'] = kwargs.get( - '_spec_property_naming', False - ) - kwargs['_content_type'] = kwargs.get( - '_content_type') - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['namespace'] = \ - namespace - kwargs['name'] = \ - name - kwargs['body'] = \ - body - return self.update_workflow_template_endpoint.call_with_http_info(**kwargs) + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, str] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if namespace is not None: + _path_params['namespace'] = namespace + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = 
_default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'BearerToken' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/api/v1/workflow-templates/{namespace}/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + diff --git a/sdks/python/client/argo_workflows/api_client.py b/sdks/python/client/argo_workflows/api_client.py index 5d9aa10e07c7..bc2d91fa61b0 100644 --- a/sdks/python/client/argo_workflows/api_client.py +++ b/sdks/python/client/argo_workflows/api_client.py @@ -1,45 +1,46 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 +import datetime +from dateutil.parser import parse +from enum import Enum import json -import atexit import mimetypes -from multiprocessing.pool import ThreadPool -import io import os import re -import typing -from urllib.parse import quote -from urllib3.fields import RequestField +import tempfile +from urllib.parse import quote +from typing import Tuple, Optional, List, Dict -from argo_workflows import rest from argo_workflows.configuration import Configuration -from argo_workflows.exceptions import ApiTypeError, ApiValueError, ApiException -from argo_workflows.model_utils import ( - ModelNormal, - ModelSimple, - ModelComposed, - check_allowed_values, - check_validations, - date, - datetime, - deserialize_file, - file_type, - model_to_dict, - none_type, - validate_and_convert_types +from argo_workflows.api_response import ApiResponse, T as ApiResponseT +import argo_workflows.models +from argo_workflows import rest +from argo_workflows.exceptions import ( + ApiValueError, + ApiException, + BadRequestException, + UnauthorizedException, + ForbiddenException, + NotFoundException, + ServiceException ) +RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]] -class ApiClient(object): +class ApiClient: """Generic API client for OpenAPI client library builds. OpenAPI generic API client. This client handles the client- @@ -47,28 +48,38 @@ class ApiClient(object): the methods and models for each application are generated from the OpenAPI templates. - NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - Do not edit the class manually. - :param configuration: .Configuration object for this client :param header_name: a header to pass when making calls to the API. :param header_value: a header value to pass when making calls to the API. 
:param cookie: a cookie to include in the header when making calls to the API - :param pool_threads: The number of threads to use for async requests - to the API. More threads means more concurrent API requests. """ + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int, # TODO remove as only py3 is supported? + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'object': object, + } _pool = None - def __init__(self, configuration=None, header_name=None, header_value=None, - cookie=None, pool_threads=1): + def __init__( + self, + configuration=None, + header_name=None, + header_value=None, + cookie=None + ) -> None: + # use default configuration if none is provided if configuration is None: - configuration = Configuration.get_default_copy() + configuration = Configuration.get_default() self.configuration = configuration - self.pool_threads = pool_threads self.rest_client = rest.RESTClientObject(configuration) self.default_headers = {} @@ -77,30 +88,13 @@ def __init__(self, configuration=None, header_name=None, header_value=None, self.cookie = cookie # Set default User-Agent. self.user_agent = 'OpenAPI-Generator/0.0.0-pre/python' + self.client_side_validation = configuration.client_side_validation def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): - self.close() - - def close(self): - if self._pool: - self._pool.close() - self._pool.join() - self._pool = None - if hasattr(atexit, 'unregister'): - atexit.unregister(self.close) - - @property - def pool(self): - """Create thread pool on first request - avoids instantiating unused threadpool for blocking clients. 
- """ - if self._pool is None: - atexit.register(self.close) - self._pool = ThreadPool(self.pool_threads) - return self._pool + pass @property def user_agent(self): @@ -114,26 +108,69 @@ def user_agent(self, value): def set_default_header(self, header_name, header_value): self.default_headers[header_name] = header_value - def __call_api( + + _default = None + + @classmethod + def get_default(cls): + """Return new instance of ApiClient. + + This method returns newly created, based on default constructor, + object of ApiClient class or returns a copy of default + ApiClient. + + :return: The ApiClient object. + """ + if cls._default is None: + cls._default = ApiClient() + return cls._default + + @classmethod + def set_default(cls, default): + """Set default instance of ApiClient. + + It stores default ApiClient. + + :param default: object of ApiClient. + """ + cls._default = default + + def param_serialize( self, - resource_path: str, - method: str, - path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - body: typing.Optional[typing.Any] = None, - post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, - response_type: typing.Optional[typing.Tuple[typing.Any]] = None, - auth_settings: typing.Optional[typing.List[str]] = None, - _return_http_data_only: typing.Optional[bool] = None, - collection_formats: typing.Optional[typing.Dict[str, str]] = None, - _preload_content: bool = True, - _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, - _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None, - _content_type: typing.Optional[str] = None - ): + method, + resource_path, + path_params=None, + query_params=None, + header_params=None, + body=None, + 
post_params=None, + files=None, auth_settings=None, + collection_formats=None, + _host=None, + _request_auth=None + ) -> RequestSerialized: + + """Builds the HTTP request params needed by the request. + :param method: Method to call. + :param resource_path: Path to method endpoint. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. 
+ :return: tuple of form (path, http_method, query_params, header_params, + body, post_params, files) + """ config = self.configuration @@ -144,14 +181,17 @@ def __call_api( header_params['Cookie'] = self.cookie if header_params: header_params = self.sanitize_for_serialization(header_params) - header_params = dict(self.parameters_to_tuples(header_params, - collection_formats)) + header_params = dict( + self.parameters_to_tuples(header_params,collection_formats) + ) # path parameters if path_params: path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples(path_params, - collection_formats) + path_params = self.parameters_to_tuples( + path_params, + collection_formats + ) for k, v in path_params: # specified safe chars, encode everything resource_path = resource_path.replace( @@ -159,31 +199,31 @@ def __call_api( quote(str(v), safe=config.safe_chars_for_path_param) ) - # query parameters - if query_params: - query_params = self.sanitize_for_serialization(query_params) - query_params = self.parameters_to_tuples(query_params, - collection_formats) - # post parameters if post_params or files: post_params = post_params if post_params else [] post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples(post_params, - collection_formats) + post_params = self.parameters_to_tuples( + post_params, + collection_formats + ) post_params.extend(self.files_parameters(files)) - if header_params['Content-Type'].startswith("multipart"): - post_params = self.parameters_to_multipart(post_params, - (dict) ) + + # auth setting + self.update_params_for_auth( + header_params, + query_params, + auth_settings, + resource_path, + method, + body, + request_auth=_request_auth + ) # body if body: body = self.sanitize_for_serialization(body) - # auth setting - self.update_params_for_auth(header_params, query_params, - auth_settings, resource_path, method, body) - # request url if _host is None: url = 
self.configuration.host + resource_path @@ -191,73 +231,106 @@ def __call_api( # use server/host defined in path or operation instead url = _host + resource_path + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + url_query = self.parameters_to_url_query( + query_params, + collection_formats + ) + url += "?" + url_query + + return method, url, header_params, body, post_params + + + def call_api( + self, + method, + url, + header_params=None, + body=None, + post_params=None, + _request_timeout=None + ) -> rest.RESTResponse: + """Makes the HTTP request (synchronous) + :param method: Method to call. + :param url: Path to method endpoint. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param _request_timeout: timeout setting for this request. + :return: RESTResponse + """ + try: # perform request and return response - response_data = self.request( - method, url, query_params=query_params, headers=header_params, - post_params=post_params, body=body, - _preload_content=_preload_content, - _request_timeout=_request_timeout) + response_data = self.rest_client.request( + method, url, + headers=header_params, + body=body, post_params=post_params, + _request_timeout=_request_timeout + ) + except ApiException as e: - e.body = e.body.decode('utf-8') raise e - self.last_response = response_data + return response_data + + def response_deserialize( + self, + response_data: rest.RESTResponse, + response_types_map: Optional[Dict[str, ApiResponseT]]=None + ) -> ApiResponse[ApiResponseT]: + """Deserializes response into an object. + :param response_data: RESTResponse object to be deserialized. + :param response_types_map: dict of response types. 
+ :return: ApiResponse + """ - return_data = response_data + msg = "RESTResponse.read() must be called before passing it to response_deserialize()" + assert response_data.data is not None, msg - if not _preload_content: - return (return_data) - return return_data + response_type = response_types_map.get(str(response_data.status), None) + if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: + # if not found, look for '1XX', '2XX', etc. + response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) # deserialize response data - if response_type: - if response_type != (file_type,): - encoding = "utf-8" + response_text = None + return_data = None + try: + if response_type == "bytearray": + return_data = response_data.data + elif response_type == "file": + return_data = self.__deserialize_file(response_data) + elif response_type is not None: + match = None content_type = response_data.getheader('content-type') if content_type is not None: - match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type) - if match: - encoding = match.group(1) - response_data.data = response_data.data.decode(encoding) - - return_data = self.deserialize( - response_data, - response_type, - _check_type - ) - else: - return_data = None + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) + encoding = match.group(1) if match else "utf-8" + response_text = response_data.data.decode(encoding) + return_data = self.deserialize(response_text, response_type) + finally: + if not 200 <= response_data.status <= 299: + raise ApiException.from_response( + http_resp=response_data, + body=response_text, + data=return_data, + ) - if _return_http_data_only: - return (return_data) - else: - return (return_data, response_data.status, - response_data.getheaders()) + return ApiResponse( + status_code = response_data.status, + data = return_data, + headers = response_data.getheaders(), + raw_data = response_data.data + ) - 
def parameters_to_multipart(self, params, collection_types): - """Get parameters as list of tuples, formatting as json if value is collection_types + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. - :param params: Parameters as list of two-tuples - :param dict collection_types: Parameter collection types - :return: Parameters as list of tuple or urllib3.fields.RequestField - """ - new_params = [] - if collection_types is None: - collection_types = (dict) - for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 - if isinstance(v, collection_types): # v is instance of collection_type, formatting as application/json - v = json.dumps(v, ensure_ascii=False).encode("utf-8") - field = RequestField(k, v) - field.make_multipart(content_type="application/json; charset=utf-8") - new_params.append(field) - else: - new_params.append((k, v)) - return new_params - - @classmethod - def sanitize_for_serialization(cls, obj): - """Prepares data for transmission before it is sent with the rest client If obj is None, return None. If obj is str, int, long, float, bool, return directly. If obj is datetime.datetime, datetime.date @@ -265,228 +338,102 @@ def sanitize_for_serialization(cls, obj): If obj is list, sanitize each element in the list. If obj is dict, return the dict. If obj is OpenAPI model, return the properties dict. - If obj is io.IOBase, return the bytes + :param obj: The data to serialize. :return: The serialized form of data. 
""" - if isinstance(obj, (ModelNormal, ModelComposed)): - return { - key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj, serialize=True).items() - } - elif isinstance(obj, io.IOBase): - return cls.get_file_data_and_close_file(obj) - elif isinstance(obj, (str, int, float, none_type, bool)): + if obj is None: + return None + elif isinstance(obj, self.PRIMITIVE_TYPES): return obj - elif isinstance(obj, (datetime, date)): + elif isinstance(obj, list): + return [ + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ] + elif isinstance(obj, tuple): + return tuple( + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ) + elif isinstance(obj, (datetime.datetime, datetime.date)): return obj.isoformat() - elif isinstance(obj, ModelSimple): - return cls.sanitize_for_serialization(obj.value) - elif isinstance(obj, (list, tuple)): - return [cls.sanitize_for_serialization(item) for item in obj] - if isinstance(obj, dict): - return {key: cls.sanitize_for_serialization(val) for key, val in obj.items()} - raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__)) - - def deserialize(self, response, response_type, _check_type): + + elif isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + obj_dict = obj.to_dict() + + return { + key: self.sanitize_for_serialization(val) + for key, val in obj_dict.items() + } + + def deserialize(self, response_text, response_type): """Deserializes response into an object. :param response: RESTResponse object to be deserialized. 
- :param response_type: For the response, a tuple containing: - valid classes - a list containing valid classes (for list schemas) - a dict containing a tuple of valid classes as the value - Example values: - (str,) - (Pet,) - (float, none_type) - ([int, none_type],) - ({str: (bool, str, int, float, date, datetime, str, none_type)},) - :param _check_type: boolean, whether to check the types of the data - received from the server - :type _check_type: bool + :param response_type: class literal for + deserialized object, or string of class name. :return: deserialized object. """ - # handle file downloading - # save response body into a tmp file and return the instance - if response_type == (file_type,): - content_disposition = response.getheader("Content-Disposition") - return deserialize_file(response.data, self.configuration, - content_disposition=content_disposition) # fetch data from response object try: - received_data = json.loads(response.data) + data = json.loads(response_text) except ValueError: - received_data = response.data - - # store our data under the key of 'received_data' so users have some - # context if they are deserializing a string and the data type is wrong - deserialized_data = validate_and_convert_types( - received_data, - response_type, - ['received_data'], - True, - _check_type, - configuration=self.configuration - ) - return deserialized_data + data = response_text - def call_api( - self, - resource_path: str, - method: str, - path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - body: typing.Optional[typing.Any] = None, - post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, - response_type: typing.Optional[typing.Tuple[typing.Any]] = None, - auth_settings: 
typing.Optional[typing.List[str]] = None, - async_req: typing.Optional[bool] = None, - _return_http_data_only: typing.Optional[bool] = None, - collection_formats: typing.Optional[typing.Dict[str, str]] = None, - _preload_content: bool = True, - _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, - _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None - ): - """Makes the HTTP request (synchronous) and returns deserialized data. - - To make an async_req request, set the async_req parameter. + return self.__deserialize(data, response_type) - :param resource_path: Path to method endpoint. - :param method: Method to call. - :param path_params: Path parameters in the url. - :param query_params: Query parameters in the url. - :param header_params: Header parameters to be - placed in the request header. - :param body: Request body. - :param post_params dict: Request post form parameters, - for `application/x-www-form-urlencoded`, `multipart/form-data`. - :param auth_settings list: Auth Settings names for the request. - :param response_type: For the response, a tuple containing: - valid classes - a list containing valid classes (for list schemas) - a dict containing a tuple of valid classes as the value - Example values: - (str,) - (Pet,) - (float, none_type) - ([int, none_type],) - ({str: (bool, str, int, float, date, datetime, str, none_type)},) - :param files: key -> field name, value -> a list of open file - objects for `multipart/form-data`. - :type files: dict - :param async_req bool: execute request asynchronously - :type async_req: bool, optional - :param _return_http_data_only: response data without head status code - and headers - :type _return_http_data_only: bool, optional - :param collection_formats: dict of collection formats for path, query, - header, and post parameters. 
- :type collection_formats: dict, optional - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. - :type _preload_content: bool, optional - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :param _check_type: boolean describing if the data back from the server - should have its type checked. - :type _check_type: bool, optional - :return: - If async_req parameter is True, - the request will be called asynchronously. - The method will return the request thread. - If parameter async_req is False or missing, - then the method will return the response directly. + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. """ - if not async_req: - return self.__call_api(resource_path, method, - path_params, query_params, header_params, - body, post_params, files, - response_type, auth_settings, - _return_http_data_only, collection_formats, - _preload_content, _request_timeout, _host, - _check_type) - - return self.pool.apply_async(self.__call_api, (resource_path, - method, path_params, - query_params, - header_params, body, - post_params, files, - response_type, - auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, - _request_timeout, - _host, _check_type)) - - def request(self, method, url, query_params=None, headers=None, - post_params=None, body=None, _preload_content=True, - _request_timeout=None): - """Makes the HTTP request using RESTClient.""" - if method == "GET": - return self.rest_client.GET(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "HEAD": - return 
self.rest_client.HEAD(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "OPTIONS": - return self.rest_client.OPTIONS(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "POST": - return self.rest_client.POST(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PUT": - return self.rest_client.PUT(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PATCH": - return self.rest_client.PATCH(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "DELETE": - return self.rest_client.DELETE(url, - query_params=query_params, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + if data is None: + return None + + if isinstance(klass, str): + if klass.startswith('List['): + m = re.match(r'List\[(.*)]', klass) + assert m is not None, "Malformed List type definition" + sub_kls = m.group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('Dict['): + m = re.match(r'Dict\[([^,]*), (.*)]', klass) + assert m is not None, "Malformed Dict type definition" + sub_kls = m.group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(argo_workflows.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return 
self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + elif issubclass(klass, Enum): + return self.__deserialize_enum(data, klass) else: - raise ApiValueError( - "http method must be `GET`, `HEAD`, `OPTIONS`," - " `POST`, `PATCH`, `PUT` or `DELETE`." - ) + return self.__deserialize_model(data, klass) def parameters_to_tuples(self, params, collection_formats): """Get parameters as list of tuples, formatting collections. @@ -495,10 +442,10 @@ def parameters_to_tuples(self, params, collection_formats): :param dict collection_formats: Parameter collection formats :return: Parameters as list of tuples, collections formatted """ - new_params = [] + new_params: List[Tuple[str, str]] = [] if collection_formats is None: collection_formats = {} - for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 + for k, v in params.items() if isinstance(params, dict) else params: if k in collection_formats: collection_format = collection_formats[k] if collection_format == 'multi': @@ -518,349 +465,294 @@ def parameters_to_tuples(self, params, collection_formats): new_params.append((k, v)) return new_params - @staticmethod - def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes: - file_data = file_instance.read() - file_instance.close() - return file_data + def parameters_to_url_query(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. - def files_parameters(self, files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None): + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: URL query string (e.g. 
a=Hello%20World&b=123) + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if isinstance(v, bool): + v = str(v).lower() + if isinstance(v, (int, float)): + v = str(v) + if isinstance(v, dict): + v = json.dumps(v) + + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, str(value)) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(quote(str(value)) for value in v)) + ) + else: + new_params.append((k, quote(str(v)))) + + return "&".join(["=".join(map(str, item)) for item in new_params]) + + def files_parameters(self, files=None): """Builds form parameters. - :param files: None or a dict with key=param_name and - value is a list of open file objects - :return: List of tuples of form parameters with file data + :param files: File parameters. + :return: Form parameters with files. """ - if files is None: - return [] - params = [] - for param_name, file_instances in files.items(): - if file_instances is None: - # if the file field is nullable, skip None values - continue - for file_instance in file_instances: - if file_instance is None: - # if the file field is nullable, skip None values + + if files: + for k, v in files.items(): + if not v: continue - if file_instance.closed is True: - raise ApiValueError( - "Cannot read a closed file. The passed in file_type " - "for %s must be open." 
% param_name - ) - filename = os.path.basename(file_instance.name) - filedata = self.get_file_data_and_close_file(file_instance) - mimetype = (mimetypes.guess_type(filename)[0] or - 'application/octet-stream') - params.append( - tuple([param_name, tuple([filename, filedata, mimetype])])) + file_names = v if type(v) is list else [v] + for n in file_names: + with open(n, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + mimetype = ( + mimetypes.guess_type(filename)[0] + or 'application/octet-stream' + ) + params.append( + tuple([k, tuple([filename, filedata, mimetype])]) + ) return params - def select_header_accept(self, accepts): + def select_header_accept(self, accepts: List[str]) -> Optional[str]: """Returns `Accept` based on an array of accepts provided. :param accepts: List of headers. :return: Accept (e.g. application/json). """ if not accepts: - return + return None - accepts = [x.lower() for x in accepts] + for accept in accepts: + if re.search('json', accept, re.IGNORECASE): + return accept - if 'application/json' in accepts: - return 'application/json' - else: - return ', '.join(accepts) + return accepts[0] - def select_header_content_type(self, content_types, method=None, body=None): + def select_header_content_type(self, content_types): """Returns `Content-Type` based on an array of content_types provided. :param content_types: List of content-types. - :param method: http method (e.g. POST, PATCH). - :param body: http body to send. :return: Content-Type (e.g. application/json). 
""" if not content_types: - return 'application/json' - - content_types = [x.lower() for x in content_types] + return None - if (method == 'PATCH' and - 'application/json-patch+json' in content_types and - isinstance(body, list)): - return 'application/json-patch+json' + for content_type in content_types: + if re.search('json', content_type, re.IGNORECASE): + return content_type - if 'application/json' in content_types or '*/*' in content_types: - return 'application/json' - else: - return content_types[0] + return content_types[0] - def update_params_for_auth(self, headers, queries, auth_settings, - resource_path, method, body): + def update_params_for_auth( + self, + headers, + queries, + auth_settings, + resource_path, + method, + body, + request_auth=None + ) -> None: """Updates header and query params based on authentication setting. :param headers: Header parameters dict to be updated. :param queries: Query parameters tuple list to be updated. :param auth_settings: Authentication setting identifiers list. - :param resource_path: A string representation of the HTTP request resource path. - :param method: A string representation of the HTTP request method. - :param body: A object representing the body of the HTTP request. - The object type is the return value of _encoder.default(). + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param request_auth: if set, the provided settings will + override the token in the configuration. 
""" if not auth_settings: return - for auth in auth_settings: - auth_setting = self.configuration.auth_settings().get(auth) - if auth_setting: - if auth_setting['in'] == 'cookie': - headers['Cookie'] = auth_setting['value'] - elif auth_setting['in'] == 'header': - if auth_setting['type'] != 'http-signature': - headers[auth_setting['key']] = auth_setting['value'] - elif auth_setting['in'] == 'query': - queries.append((auth_setting['key'], auth_setting['value'])) - else: - raise ApiValueError( - 'Authentication token must be in `query` or `header`' + if request_auth: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + request_auth + ) + else: + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + auth_setting ) + def _apply_auth_params( + self, + headers, + queries, + resource_path, + method, + body, + auth_setting + ) -> None: + """Updates the request parameters based on a single auth_setting -class Endpoint(object): - def __init__(self, settings=None, params_map=None, root_map=None, - headers_map=None, api_client=None, callable=None): - """Creates an endpoint - - Args: - settings (dict): see below key value pairs - 'response_type' (tuple/None): response type - 'auth' (list): a list of auth type keys - 'endpoint_path' (str): the endpoint path - 'operation_id' (str): endpoint string identifier - 'http_method' (str): POST/PUT/PATCH/GET etc - 'servers' (list): list of str servers that this endpoint is at - params_map (dict): see below key value pairs - 'all' (list): list of str endpoint parameter names - 'required' (list): list of required parameter names - 'nullable' (list): list of nullable parameter names - 'enum' (list): list of parameters with enum values - 'validation' (list): list of parameters with validations - root_map - 'validations' (dict): the dict mapping endpoint parameter tuple - 
paths to their validation dictionaries - 'allowed_values' (dict): the dict mapping endpoint parameter - tuple paths to their allowed_values (enum) dictionaries - 'openapi_types' (dict): param_name to openapi type - 'attribute_map' (dict): param_name to camelCase name - 'location_map' (dict): param_name to 'body', 'file', 'form', - 'header', 'path', 'query' - collection_format_map (dict): param_name to `csv` etc. - headers_map (dict): see below key value pairs - 'accept' (list): list of Accept header strings - 'content_type' (list): list of Content-Type header strings - api_client (ApiClient) api client instance - callable (function): the function which is invoked when the - Endpoint is called + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). 
+ :param auth_setting: auth settings for the endpoint """ - self.settings = settings - self.params_map = params_map - self.params_map['all'].extend([ - 'async_req', - '_host_index', - '_preload_content', - '_request_timeout', - '_return_http_data_only', - '_check_input_type', - '_check_return_type', - '_content_type', - '_spec_property_naming' - ]) - self.params_map['nullable'].extend(['_request_timeout']) - self.validations = root_map['validations'] - self.allowed_values = root_map['allowed_values'] - self.openapi_types = root_map['openapi_types'] - extra_types = { - 'async_req': (bool,), - '_host_index': (none_type, int), - '_preload_content': (bool,), - '_request_timeout': (none_type, float, (float,), [float], int, (int,), [int]), - '_return_http_data_only': (bool,), - '_check_input_type': (bool,), - '_check_return_type': (bool,), - '_spec_property_naming': (bool,), - '_content_type': (none_type, str) - } - self.openapi_types.update(extra_types) - self.attribute_map = root_map['attribute_map'] - self.location_map = root_map['location_map'] - self.collection_format_map = root_map['collection_format_map'] - self.headers_map = headers_map - self.api_client = api_client - self.callable = callable - - def __validate_inputs(self, kwargs): - for param in self.params_map['enum']: - if param in kwargs: - check_allowed_values( - self.allowed_values, - (param,), - kwargs[param] - ) + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + if auth_setting['type'] != 'http-signature': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + queries.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) - for param in self.params_map['validation']: - if param in kwargs: - check_validations( - self.validations, - (param,), - kwargs[param], - configuration=self.api_client.configuration - ) 
+ def __deserialize_file(self, response): + """Deserializes body to file - if kwargs['_check_input_type'] is False: - return + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + handle file downloading + save response body into a tmp file and return the instance - for key, value in kwargs.items(): - fixed_val = validate_and_convert_types( - value, - self.openapi_types[key], - [key], - kwargs['_spec_property_naming'], - kwargs['_check_input_type'], - configuration=self.api_client.configuration + :param response: RESTResponse. + :return: file path. + """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + m = re.search( + r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition ) - kwargs[key] = fixed_val - - def __gather_params(self, kwargs): - params = { - 'body': None, - 'collection_format': {}, - 'file': {}, - 'form': [], - 'header': {}, - 'path': {}, - 'query': [] - } + assert m is not None, "Unexpected 'content-disposition' header value" + filename = m.group(1) + path = os.path.join(os.path.dirname(path), filename) - for param_name, param_value in kwargs.items(): - param_location = self.location_map.get(param_name) - if param_location is None: - continue - if param_location: - if param_location == 'body': - params['body'] = param_value - continue - base_name = self.attribute_map[param_name] - if (param_location == 'form' and - self.openapi_types[param_name] == (file_type,)): - params['file'][base_name] = [param_value] - elif (param_location == 'form' and - self.openapi_types[param_name] == ([file_type],)): - # param_value is already a list - params['file'][base_name] = param_value - elif param_location in {'form', 'query'}: - param_value_full = (base_name, param_value) - params[param_location].append(param_value_full) - if 
param_location not in {'form', 'query'}: - params[param_location][base_name] = param_value - collection_format = self.collection_format_map.get(param_name) - if collection_format: - params['collection_format'][base_name] = collection_format + with open(path, "wb") as f: + f.write(response.data) - return params + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. - def __call__(self, *args, **kwargs): - """ This method is invoked when endpoints are called - Example: + :param data: str. + :param klass: class literal. - api_instance = ArchivedWorkflowServiceApi() - api_instance.delete_archived_workflow # this is an instance of the class Endpoint - api_instance.delete_archived_workflow() # this invokes api_instance.delete_archived_workflow.__call__() - which then invokes the callable functions stored in that endpoint at - api_instance.delete_archived_workflow.callable or self.callable in this class + :return: int, long, float, str, bool. + """ + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + :return: object. """ - return self.callable(self, *args, **kwargs) + return value - def call_with_http_info(self, **kwargs): + def __deserialize_date(self, string): + """Deserializes string to date. + :param string: str. + :return: date. 
+ """ try: - index = self.api_client.configuration.server_operation_index.get( - self.settings['operation_id'], self.api_client.configuration.server_index - ) if kwargs['_host_index'] is None else kwargs['_host_index'] - server_variables = self.api_client.configuration.server_operation_variables.get( - self.settings['operation_id'], self.api_client.configuration.server_variables - ) - _host = self.api_client.configuration.get_host_from_settings( - index, variables=server_variables, servers=self.settings['servers'] + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) ) - except IndexError: - if self.settings['servers']: - raise ApiValueError( - "Invalid host index. Must be 0 <= index < %s" % - len(self.settings['servers']) - ) - _host = None - - for key, value in kwargs.items(): - if key not in self.params_map['all']: - raise ApiTypeError( - "Got an unexpected parameter '%s'" - " to method `%s`" % - (key, self.settings['operation_id']) - ) - # only throw this nullable ApiValueError if _check_input_type - # is False, if _check_input_type==True we catch this case - # in self.__validate_inputs - if (key not in self.params_map['nullable'] and value is None - and kwargs['_check_input_type'] is False): - raise ApiValueError( - "Value may not be None for non-nullable parameter `%s`" - " when calling `%s`" % - (key, self.settings['operation_id']) - ) - for key in self.params_map['required']: - if key not in kwargs.keys(): - raise ApiValueError( - "Missing the required parameter `%s` when calling " - "`%s`" % (key, self.settings['operation_id']) + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. 
+ """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) ) + ) - self.__validate_inputs(kwargs) + def __deserialize_enum(self, data, klass): + """Deserializes primitive type to enum. - params = self.__gather_params(kwargs) + :param data: primitive type. + :param klass: class literal. + :return: enum value. + """ + try: + return klass(data) + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as `{1}`" + .format(data, klass) + ) + ) - accept_headers_list = self.headers_map['accept'] - if accept_headers_list: - params['header']['Accept'] = self.api_client.select_header_accept( - accept_headers_list) + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. - if kwargs.get('_content_type'): - params['header']['Content-Type'] = kwargs['_content_type'] - else: - content_type_headers_list = self.headers_map['content_type'] - if content_type_headers_list: - if params['body'] != "": - header_list = self.api_client.select_header_content_type( - content_type_headers_list, self.settings['http_method'], - params['body']) - params['header']['Content-Type'] = header_list - - return self.api_client.call_api( - self.settings['endpoint_path'], self.settings['http_method'], - params['path'], - params['query'], - params['header'], - body=params['body'], - post_params=params['form'], - files=params['file'], - response_type=self.settings['response_type'], - auth_settings=self.settings['auth'], - async_req=kwargs['async_req'], - _check_type=kwargs['_check_return_type'], - _return_http_data_only=kwargs['_return_http_data_only'], - _preload_content=kwargs['_preload_content'], - _request_timeout=kwargs['_request_timeout'], - _host=_host, - collection_formats=params['collection_format']) + :param data: dict, list. + :param klass: class literal. + :return: model object. 
+ """ + + return klass.from_dict(data) diff --git a/sdks/python/client/argo_workflows/api_response.py b/sdks/python/client/argo_workflows/api_response.py new file mode 100644 index 000000000000..9bc7c11f6b9f --- /dev/null +++ b/sdks/python/client/argo_workflows/api_response.py @@ -0,0 +1,21 @@ +"""API response object.""" + +from __future__ import annotations +from typing import Optional, Generic, Mapping, TypeVar +from pydantic import Field, StrictInt, StrictBytes, BaseModel + +T = TypeVar("T") + +class ApiResponse(BaseModel, Generic[T]): + """ + API response object + """ + + status_code: StrictInt = Field(description="HTTP status code") + headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") + data: T = Field(description="Deserialized data given the data type") + raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") + + model_config = { + "arbitrary_types_allowed": True + } diff --git a/sdks/python/client/argo_workflows/apis/__init__.py b/sdks/python/client/argo_workflows/apis/__init__.py deleted file mode 100644 index bbd2e429fa68..000000000000 --- a/sdks/python/client/argo_workflows/apis/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ - -# flake8: noqa - -# Import all APIs into this package. -# If you have many APIs here with many many models used in each API this may -# raise a `RecursionError`. 
-# In order to avoid this, import only the API that you directly need like: -# -# from .api.archived_workflow_service_api import ArchivedWorkflowServiceApi -# -# or import this package, but before doing it, use: -# -# import sys -# sys.setrecursionlimit(n) - -# Import APIs into API package: -from argo_workflows.api.archived_workflow_service_api import ArchivedWorkflowServiceApi -from argo_workflows.api.artifact_service_api import ArtifactServiceApi -from argo_workflows.api.cluster_workflow_template_service_api import ClusterWorkflowTemplateServiceApi -from argo_workflows.api.cron_workflow_service_api import CronWorkflowServiceApi -from argo_workflows.api.event_service_api import EventServiceApi -from argo_workflows.api.event_source_service_api import EventSourceServiceApi -from argo_workflows.api.info_service_api import InfoServiceApi -from argo_workflows.api.sensor_service_api import SensorServiceApi -from argo_workflows.api.workflow_service_api import WorkflowServiceApi -from argo_workflows.api.workflow_template_service_api import WorkflowTemplateServiceApi diff --git a/sdks/python/client/argo_workflows/configuration.py b/sdks/python/client/argo_workflows/configuration.py index 58c5cf6b00de..a7c44bb32538 100644 --- a/sdks/python/client/argo_workflows/configuration.py +++ b/sdks/python/client/argo_workflows/configuration.py @@ -1,22 +1,26 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 import copy import logging +from logging import FileHandler import multiprocessing import sys +from typing import Optional import urllib3 -from http import client as http_client -from argo_workflows.exceptions import ApiValueError - +import http.client as httplib JSON_SCHEMA_VALIDATION_KEYWORDS = { 'multipleOf', 'maximum', 'exclusiveMaximum', @@ -24,46 +28,20 @@ 'minLength', 'pattern', 'maxItems', 'minItems' } -class Configuration(object): - """NOTE: This class is auto generated by OpenAPI Generator - - Ref: https://openapi-generator.tech - Do not edit the class manually. +class Configuration: + """This class contains various settings of the API client. - :param host: Base url + :param host: Base url. :param api_key: Dict to store API key(s). Each entry in the dict specifies an API key. The dict key is the name of the security scheme in the OAS specification. The dict value is the API key secret. - :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + :param api_key_prefix: Dict to store API prefix (e.g. Bearer). The dict key is the name of the security scheme in the OAS specification. The dict value is an API key prefix when generating the auth data. - :param username: Username for HTTP basic authentication - :param password: Password for HTTP basic authentication - :param discard_unknown_keys: Boolean value indicating whether to discard - unknown properties. A server may send a response that includes additional - properties that are not known by the client in the following scenarios: - 1. The OpenAPI document is incomplete, i.e. it does not match the server - implementation. - 2. 
The client was generated using an older version of the OpenAPI document - and the server has been upgraded since then. - If a schema in the OpenAPI document defines the additionalProperties attribute, - then all undeclared properties received by the server are injected into the - additional properties map. In that case, there are undeclared properties, and - nothing to discard. - :param disabled_client_side_validations (string): Comma-separated list of - JSON schema validation keywords to disable JSON schema structural validation - rules. The following keywords may be specified: multipleOf, maximum, - exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, - maxItems, minItems. - By default, the validation is performed for data generated locally by the client - and data received from the server, independent of any validation performed by - the server side. If the input data does not satisfy the JSON schema validation - rules specified in the OpenAPI document, an exception is raised. - If disabled_client_side_validations is set, structural validation is - disabled. This can be useful to troubleshoot data validation problem, such as - when the OpenAPI document validation rules do not match the actual API data - received by the server. + :param username: Username for HTTP basic authentication. + :param password: Password for HTTP basic authentication. + :param access_token: Access token. :param server_index: Index to servers configuration. :param server_variables: Mapping with string values to replace variables in templated server configuration. The validation of enums is performed for @@ -72,9 +50,10 @@ class Configuration(object): configuration. :param server_operation_variables: Mapping from operation ID to a mapping with string values to replace variables in templated server configuration. - The validation of enums is performed for variables with defined enum values before. 
+ The validation of enums is performed for variables with defined enum + values before. :param ssl_ca_cert: str - the path to a file of concatenated CA certificates - in PEM format + in PEM format. :Example: @@ -102,14 +81,12 @@ class Configuration(object): def __init__(self, host=None, api_key=None, api_key_prefix=None, - access_token=None, username=None, password=None, - discard_unknown_keys=False, - disabled_client_side_validations="", + access_token=None, server_index=None, server_variables=None, server_operation_index=None, server_operation_variables=None, ssl_ca_cert=None, - ): + ) -> None: """Constructor """ self._base_path = "http://localhost:2746" if host is None else host @@ -127,7 +104,6 @@ def __init__(self, host=None, """Temp file folder for downloading files """ # Authentication Settings - self.access_token = access_token self.api_key = {} if api_key: self.api_key = api_key @@ -147,8 +123,9 @@ def __init__(self, host=None, self.password = password """Password for HTTP basic authentication """ - self.discard_unknown_keys = discard_unknown_keys - self.disabled_client_side_validations = disabled_client_side_validations + self.access_token = access_token + """Access token + """ self.logger = {} """Logging Settings """ @@ -160,7 +137,7 @@ def __init__(self, host=None, self.logger_stream_handler = None """Log stream handler """ - self.logger_file_handler = None + self.logger_file_handler: Optional[FileHandler] = None """Log file handler """ self.logger_file = None @@ -187,6 +164,10 @@ def __init__(self, host=None, self.assert_hostname = None """Set this to True/False to enable/disable SSL hostname verification. """ + self.tls_server_name = None + """SSL/TLS Server Name Indication (SNI) + Set this to the SNI value expected by the server. 
+ """ self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 """urllib3 connection pool's maximum number of connections saved @@ -196,12 +177,9 @@ def __init__(self, host=None, cpu_count * 5 is used as default value to increase performance. """ - self.proxy = None + self.proxy: Optional[str] = None """Proxy URL """ - self.no_proxy = None - """bypass proxy for host in the no_proxy list. - """ self.proxy_headers = None """Proxy headers """ @@ -214,8 +192,17 @@ def __init__(self, host=None, # Enable client side validation self.client_side_validation = True - # Options to pass down to the underlying urllib3 socket self.socket_options = None + """Options to pass down to the underlying urllib3 socket + """ + + self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" + """datetime format + """ + + self.date_format = "%Y-%m-%d" + """date format + """ def __deepcopy__(self, memo): cls = self.__class__ @@ -233,13 +220,6 @@ def __deepcopy__(self, memo): def __setattr__(self, name, value): object.__setattr__(self, name, value) - if name == 'disabled_client_side_validations': - s = set(filter(None, value.split(','))) - for v in s: - if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: - raise ApiValueError( - "Invalid keyword: '{0}''".format(v)) - self._disabled_client_side_validations = s @classmethod def set_default(cls, default): @@ -250,21 +230,31 @@ def set_default(cls, default): :param default: object of Configuration """ - cls._default = copy.deepcopy(default) + cls._default = default @classmethod def get_default_copy(cls): - """Return new instance of configuration. + """Deprecated. Please use `get_default` instead. + + Deprecated. Please use `get_default` instead. + + :return: The configuration object. + """ + return cls.get_default() + + @classmethod + def get_default(cls): + """Return the default configuration. 
This method returns newly created, based on default constructor, object of Configuration class or returns a copy of default - configuration passed by the set_default method. + configuration. :return: The configuration object. """ - if cls._default is not None: - return copy.deepcopy(cls._default) - return Configuration() + if cls._default is None: + cls._default = Configuration() + return cls._default @property def logger_file(self): @@ -318,15 +308,15 @@ def debug(self, value): # if debug status is True, turn on debug logging for _, logger in self.logger.items(): logger.setLevel(logging.DEBUG) - # turn on http_client debug - http_client.HTTPConnection.debuglevel = 1 + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 else: # if debug status is False, turn off debug logging, # setting log level to default `logging.WARNING` for _, logger in self.logger.items(): logger.setLevel(logging.WARNING) - # turn off http_client debug - http_client.HTTPConnection.debuglevel = 0 + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 @property def logger_format(self): diff --git a/sdks/python/client/argo_workflows/exceptions.py b/sdks/python/client/argo_workflows/exceptions.py index b152a42786dc..563a6de53a7d 100644 --- a/sdks/python/client/argo_workflows/exceptions.py +++ b/sdks/python/client/argo_workflows/exceptions.py @@ -1,13 +1,18 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + Do not edit the class manually. +""" # noqa: E501 +from typing import Any, Optional +from typing_extensions import Self class OpenApiException(Exception): """The base exception class for all OpenAPIExceptions""" @@ -15,7 +20,7 @@ class OpenApiException(Exception): class ApiTypeError(OpenApiException, TypeError): def __init__(self, msg, path_to_item=None, valid_classes=None, - key_type=None): + key_type=None) -> None: """ Raises an exception for TypeErrors Args: @@ -43,7 +48,7 @@ def __init__(self, msg, path_to_item=None, valid_classes=None, class ApiValueError(OpenApiException, ValueError): - def __init__(self, msg, path_to_item=None): + def __init__(self, msg, path_to_item=None) -> None: """ Args: msg (str): the exception message @@ -61,7 +66,7 @@ def __init__(self, msg, path_to_item=None): class ApiAttributeError(OpenApiException, AttributeError): - def __init__(self, msg, path_to_item=None): + def __init__(self, msg, path_to_item=None) -> None: """ Raised when an attribute reference or assignment fails. 
@@ -80,7 +85,7 @@ def __init__(self, msg, path_to_item=None): class ApiKeyError(OpenApiException, KeyError): - def __init__(self, msg, path_to_item=None): + def __init__(self, msg, path_to_item=None) -> None: """ Args: msg (str): the exception message @@ -98,17 +103,56 @@ def __init__(self, msg, path_to_item=None): class ApiException(OpenApiException): - def __init__(self, status=None, reason=None, http_resp=None): + def __init__( + self, + status=None, + reason=None, + http_resp=None, + *, + body: Optional[str] = None, + data: Optional[Any] = None, + ) -> None: + self.status = status + self.reason = reason + self.body = body + self.data = data + self.headers = None + if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data + if self.status is None: + self.status = http_resp.status + if self.reason is None: + self.reason = http_resp.reason + if self.body is None: + try: + self.body = http_resp.data.decode('utf-8') + except Exception: + pass self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None + + @classmethod + def from_response( + cls, + *, + http_resp, + body: Optional[str], + data: Optional[Any], + ) -> Self: + if http_resp.status == 400: + raise BadRequestException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 401: + raise UnauthorizedException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 403: + raise ForbiddenException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 404: + raise NotFoundException(http_resp=http_resp, body=body, data=data) + + if 500 <= http_resp.status <= 599: + raise ServiceException(http_resp=http_resp, body=body, data=data) + raise ApiException(http_resp=http_resp, body=body, data=data) def __str__(self): """Custom error messages for exception""" @@ -118,34 +162,30 @@ def __str__(self): error_message += "HTTP response headers: {0}\n".format( 
self.headers) - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) + if self.data or self.body: + error_message += "HTTP response body: {0}\n".format(self.data or self.body) return error_message -class NotFoundException(ApiException): +class BadRequestException(ApiException): + pass - def __init__(self, status=None, reason=None, http_resp=None): - super(NotFoundException, self).__init__(status, reason, http_resp) +class NotFoundException(ApiException): + pass -class UnauthorizedException(ApiException): - def __init__(self, status=None, reason=None, http_resp=None): - super(UnauthorizedException, self).__init__(status, reason, http_resp) +class UnauthorizedException(ApiException): + pass class ForbiddenException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None): - super(ForbiddenException, self).__init__(status, reason, http_resp) + pass class ServiceException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None): - super(ServiceException, self).__init__(status, reason, http_resp) + pass def render_path(path_to_item): diff --git a/sdks/python/client/argo_workflows/model/__init__.py b/sdks/python/client/argo_workflows/model/__init__.py deleted file mode 100644 index cfe32b784926..000000000000 --- a/sdks/python/client/argo_workflows/model/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# we can not import model classes here because that would create a circular -# reference which would not work in python2 -# do not import all models into this module because that uses a lot of memory and stack frames -# if you need the ability to import all models from one package, import them with -# from {{packageName}.models import ModelA, ModelB diff --git a/sdks/python/client/argo_workflows/model/affinity.py b/sdks/python/client/argo_workflows/model/affinity.py deleted file mode 100644 index 16d6a91e58d0..000000000000 --- a/sdks/python/client/argo_workflows/model/affinity.py +++ /dev/null @@ -1,273 +0,0 
@@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.node_affinity import NodeAffinity - from argo_workflows.model.pod_affinity import PodAffinity - from argo_workflows.model.pod_anti_affinity import PodAntiAffinity - globals()['NodeAffinity'] = NodeAffinity - globals()['PodAffinity'] = PodAffinity - globals()['PodAntiAffinity'] = PodAntiAffinity - - -class Affinity(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'node_affinity': (NodeAffinity,), # noqa: E501 - 'pod_affinity': (PodAffinity,), # noqa: E501 - 'pod_anti_affinity': (PodAntiAffinity,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'node_affinity': 'nodeAffinity', # noqa: E501 - 'pod_affinity': 'podAffinity', # noqa: E501 - 'pod_anti_affinity': 'podAntiAffinity', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """Affinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - node_affinity (NodeAffinity): [optional] # noqa: E501 - pod_affinity (PodAffinity): [optional] # noqa: E501 - pod_anti_affinity (PodAntiAffinity): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """Affinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - node_affinity (NodeAffinity): [optional] # noqa: E501 - pod_affinity (PodAffinity): [optional] # noqa: E501 - pod_anti_affinity (PodAntiAffinity): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/aws_elastic_block_store_volume_source.py b/sdks/python/client/argo_workflows/model/aws_elastic_block_store_volume_source.py deleted file mode 100644 index 2e920aa7dcd5..000000000000 --- a/sdks/python/client/argo_workflows/model/aws_elastic_block_store_volume_source.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class AWSElasticBlockStoreVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'volume_id': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'partition': (int,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'volume_id': 'volumeID', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'partition': 'partition', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, volume_id, *args, **kwargs): # noqa: E501 - """AWSElasticBlockStoreVolumeSource - a model defined in OpenAPI - - Args: - volume_id (str): Unique ID of the persistent disk resource in AWS (Amazon EBS volume). More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore. [optional] # noqa: E501 - partition (int): The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty).. 
[optional] # noqa: E501 - read_only (bool): Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_id = volume_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, volume_id, *args, **kwargs): # noqa: E501 - """AWSElasticBlockStoreVolumeSource - a model defined in OpenAPI - - Args: - volume_id (str): Unique ID of the persistent disk resource in AWS (Amazon EBS volume). 
More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore. [optional] # noqa: E501 - partition (int): The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. 
Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty).. [optional] # noqa: E501 - read_only (bool): Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_id = volume_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/azure_disk_volume_source.py b/sdks/python/client/argo_workflows/model/azure_disk_volume_source.py deleted file mode 100644 index 17ee5b9f0c8e..000000000000 --- a/sdks/python/client/argo_workflows/model/azure_disk_volume_source.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class AzureDiskVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'disk_name': (str,), # noqa: E501 - 'disk_uri': (str,), # noqa: E501 - 'caching_mode': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'disk_name': 'diskName', # noqa: E501 - 'disk_uri': 'diskURI', # noqa: E501 - 'caching_mode': 'cachingMode', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'kind': 'kind', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, disk_name, disk_uri, *args, **kwargs): # noqa: E501 - """AzureDiskVolumeSource - a model defined in OpenAPI - - Args: - disk_name (str): The Name of the data disk in the blob storage - disk_uri (str): The URI the data disk in the blob storage - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the 
wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - caching_mode (str): Host Caching mode: None, Read Only, Read Write.. [optional] # noqa: E501 - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - kind (str): Expected values Shared: multiple blob disks per storage account Dedicated: single blob disk per storage account Managed: azure managed data disk (only in managed availability set). defaults to shared. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.disk_name = disk_name - self.disk_uri = disk_uri - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, disk_name, disk_uri, *args, **kwargs): # noqa: E501 - """AzureDiskVolumeSource - a model defined in OpenAPI - - Args: - disk_name (str): The Name of the data disk in the blob storage - disk_uri (str): The URI the data disk in the blob storage - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - caching_mode (str): Host Caching mode: None, Read Only, Read Write.. [optional] # noqa: E501 - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - kind (str): Expected values Shared: multiple blob disks per storage account Dedicated: single blob disk per storage account Managed: azure managed data disk (only in managed availability set). defaults to shared. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.disk_name = disk_name - self.disk_uri = disk_uri - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/azure_file_volume_source.py b/sdks/python/client/argo_workflows/model/azure_file_volume_source.py deleted file mode 100644 index 506429512cb4..000000000000 --- a/sdks/python/client/argo_workflows/model/azure_file_volume_source.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class AzureFileVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'secret_name': (str,), # noqa: E501 - 'share_name': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'secret_name': 'secretName', # noqa: E501 - 'share_name': 'shareName', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, secret_name, share_name, *args, **kwargs): # noqa: E501 - """AzureFileVolumeSource - a model defined in OpenAPI - - Args: - secret_name (str): the name of secret that contains Azure Storage Account Name and Key - share_name (str): Share Name - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.secret_name = secret_name - self.share_name = share_name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, secret_name, share_name, *args, **kwargs): # noqa: E501 - """AzureFileVolumeSource - a model defined in OpenAPI - - Args: - secret_name (str): the name of secret that contains Azure Storage Account Name and Key - share_name (str): Share Name - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.secret_name = secret_name - self.share_name = share_name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/capabilities.py b/sdks/python/client/argo_workflows/model/capabilities.py deleted file mode 100644 index 9ea5e307b8a8..000000000000 --- a/sdks/python/client/argo_workflows/model/capabilities.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class Capabilities(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'add': ([str],), # noqa: E501 - 'drop': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'add': 'add', # noqa: E501 - 'drop': 'drop', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """Capabilities - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - add ([str]): Added capabilities. [optional] # noqa: E501 - drop ([str]): Removed capabilities. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """Capabilities - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - add ([str]): Added capabilities. [optional] # noqa: E501 - drop ([str]): Removed capabilities. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/ceph_fs_volume_source.py b/sdks/python/client/argo_workflows/model/ceph_fs_volume_source.py deleted file mode 100644 index 9c998a222022..000000000000 --- a/sdks/python/client/argo_workflows/model/ceph_fs_volume_source.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class CephFSVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'monitors': ([str],), # noqa: E501 - 'path': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'secret_file': (str,), # noqa: E501 - 'secret_ref': (LocalObjectReference,), # noqa: E501 - 'user': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'monitors': 'monitors', # noqa: E501 - 'path': 'path', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'secret_file': 'secretFile', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - 'user': 'user', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, monitors, *args, **kwargs): # noqa: E501 - """CephFSVolumeSource - a model defined in OpenAPI - - Args: - monitors ([str]): Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - path (str): Optional: Used as the mounted root, rather than the full Ceph tree, default is /. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it. [optional] # noqa: E501 - secret_file (str): Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - user (str): Optional: User is the rados user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.monitors = monitors - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, monitors, *args, **kwargs): # noqa: E501 - """CephFSVolumeSource - a model defined in OpenAPI - - Args: - monitors ([str]): Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - path (str): Optional: Used as the mounted root, rather than the full Ceph tree, default is /. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it. [optional] # noqa: E501 - secret_file (str): Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - user (str): Optional: User is the rados user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.monitors = monitors - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/cinder_volume_source.py b/sdks/python/client/argo_workflows/model/cinder_volume_source.py deleted file mode 100644 index fec29d096506..000000000000 --- a/sdks/python/client/argo_workflows/model/cinder_volume_source.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class CinderVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'volume_id': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'secret_ref': (LocalObjectReference,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'volume_id': 'volumeID', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, volume_id, *args, **kwargs): # noqa: E501 - """CinderVolumeSource - a model defined in OpenAPI - - Args: - volume_id (str): volume id used to identify the volume in cinder. More info: https://examples.k8s.io/mysql-cinder-pd/README.md - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_id = volume_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, volume_id, *args, **kwargs): # noqa: E501 - """CinderVolumeSource - a model defined in OpenAPI - - Args: - volume_id (str): volume id used to identify the volume in cinder. More info: https://examples.k8s.io/mysql-cinder-pd/README.md - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_id = volume_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/config_map_env_source.py b/sdks/python/client/argo_workflows/model/config_map_env_source.py deleted file mode 100644 index c01e403afeb8..000000000000 --- a/sdks/python/client/argo_workflows/model/config_map_env_source.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ConfigMapEnvSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'optional': 'optional', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ConfigMapEnvSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ConfigMapEnvSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/config_map_key_selector.py b/sdks/python/client/argo_workflows/model/config_map_key_selector.py deleted file mode 100644 index 73d37d46d1cc..000000000000 --- a/sdks/python/client/argo_workflows/model/config_map_key_selector.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ConfigMapKeySelector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'name': 'name', # noqa: E501 - 'optional': 'optional', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, *args, **kwargs): # noqa: E501 - """ConfigMapKeySelector - a model defined in OpenAPI - - Args: - key (str): The key to select. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap or its key must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, *args, **kwargs): # noqa: E501 - """ConfigMapKeySelector - a model defined in OpenAPI - - Args: - key (str): The key to select. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap or its key must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/config_map_projection.py b/sdks/python/client/argo_workflows/model/config_map_projection.py deleted file mode 100644 index c806a2c2f636..000000000000 --- a/sdks/python/client/argo_workflows/model/config_map_projection.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.key_to_path import KeyToPath - globals()['KeyToPath'] = KeyToPath - - -class ConfigMapProjection(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'items': ([KeyToPath],), # noqa: E501 - 'name': (str,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'name': 'name', # noqa: E501 - 'optional': 'optional', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ConfigMapProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. 
[optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap or its keys must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ConfigMapProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. 
[optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap or its keys must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/config_map_volume_source.py b/sdks/python/client/argo_workflows/model/config_map_volume_source.py deleted file mode 100644 index 1cbd2ecced62..000000000000 --- a/sdks/python/client/argo_workflows/model/config_map_volume_source.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.key_to_path import KeyToPath - globals()['KeyToPath'] = KeyToPath - - -class ConfigMapVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'default_mode': (int,), # noqa: E501 - 'items': ([KeyToPath],), # noqa: E501 - 'name': (str,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'default_mode': 'defaultMode', # noqa: E501 - 'items': 'items', # noqa: E501 - 'name': 'name', # noqa: E501 - 'optional': 'optional', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ConfigMapVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. 
[optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap or its keys must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ConfigMapVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. 
If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the ConfigMap or its keys must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/container.py b/sdks/python/client/argo_workflows/model/container.py deleted file mode 100644 index 228ff2fabb8a..000000000000 --- a/sdks/python/client/argo_workflows/model/container.py +++ /dev/null @@ -1,376 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.container_port import ContainerPort - from argo_workflows.model.env_from_source import EnvFromSource - from argo_workflows.model.env_var import EnvVar - from argo_workflows.model.lifecycle import Lifecycle - from argo_workflows.model.probe import Probe - from argo_workflows.model.resource_requirements import ResourceRequirements - from argo_workflows.model.security_context import SecurityContext - from argo_workflows.model.volume_device import VolumeDevice - from argo_workflows.model.volume_mount import VolumeMount - globals()['ContainerPort'] = ContainerPort - globals()['EnvFromSource'] = EnvFromSource - globals()['EnvVar'] = EnvVar - globals()['Lifecycle'] = Lifecycle - globals()['Probe'] = Probe - globals()['ResourceRequirements'] = ResourceRequirements - globals()['SecurityContext'] = SecurityContext - globals()['VolumeDevice'] = 
VolumeDevice - globals()['VolumeMount'] = VolumeMount - - -class Container(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - ('image_pull_policy',): { - 'ALWAYS': "Always", - 'IFNOTPRESENT': "IfNotPresent", - 'NEVER': "Never", - }, - ('termination_message_policy',): { - 'FALLBACKTOLOGSONERROR': "FallbackToLogsOnError", - 'FILE': "File", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'image': (str,), # noqa: E501 - 'args': ([str],), # noqa: E501 - 'command': ([str],), # noqa: E501 - 'env': ([EnvVar],), # noqa: E501 - 'env_from': ([EnvFromSource],), # noqa: E501 - 'image_pull_policy': (str,), # noqa: E501 - 'lifecycle': (Lifecycle,), # noqa: E501 - 'liveness_probe': (Probe,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'ports': ([ContainerPort],), # noqa: E501 - 'readiness_probe': (Probe,), # noqa: E501 - 'resources': (ResourceRequirements,), # noqa: E501 - 'security_context': (SecurityContext,), # noqa: E501 - 'startup_probe': (Probe,), # noqa: E501 - 'stdin': (bool,), # noqa: E501 - 'stdin_once': (bool,), # noqa: E501 - 'termination_message_path': (str,), # noqa: E501 - 'termination_message_policy': (str,), # noqa: E501 - 'tty': (bool,), # noqa: E501 - 'volume_devices': ([VolumeDevice],), # noqa: E501 - 'volume_mounts': ([VolumeMount],), # noqa: E501 - 'working_dir': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'image': 'image', # noqa: E501 - 'args': 'args', # noqa: E501 - 'command': 'command', # noqa: E501 - 'env': 'env', # noqa: E501 - 'env_from': 'envFrom', # noqa: E501 - 'image_pull_policy': 'imagePullPolicy', # noqa: E501 - 'lifecycle': 'lifecycle', # noqa: E501 - 'liveness_probe': 'livenessProbe', # noqa: E501 - 'name': 'name', # noqa: E501 - 'ports': 'ports', # noqa: E501 - 'readiness_probe': 'readinessProbe', # noqa: E501 - 'resources': 'resources', # noqa: E501 - 'security_context': 'securityContext', # noqa: E501 - 'startup_probe': 'startupProbe', # noqa: E501 - 'stdin': 'stdin', # noqa: E501 - 'stdin_once': 'stdinOnce', # noqa: E501 - 'termination_message_path': 'terminationMessagePath', # noqa: E501 - 'termination_message_policy': 'terminationMessagePolicy', # noqa: E501 - 'tty': 'tty', # noqa: E501 - 'volume_devices': 'volumeDevices', # noqa: E501 - 'volume_mounts': 'volumeMounts', # noqa: E501 - 'working_dir': 'workingDir', # 
noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, image, *args, **kwargs): # noqa: E501 - """Container - a model defined in OpenAPI - - Args: - image (str): Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. 
The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. 
One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images Possible enum values: - `\"Always\"` means that kubelet always attempts to pull the latest image. Container will fail If the pull fails. - `\"IfNotPresent\"` means that kubelet pulls if the image isn't present on disk. Container will fail if the image isn't present and the pull fails. - `\"Never\"` means that kubelet never pulls an image, but only uses a local image. Container will fail if the image isn't present. [optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.. [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Exposing a port here gives the system additional information about the network connections a container uses, but is primarily informational. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.. [optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. 
When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. [optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated. Possible enum values: - `\"FallbackToLogsOnError\"` will read the most recent contents of the container logs for the container status message when the container exits with an error and the terminationMessagePath has no contents. - `\"File\"` is the default behavior and will set the container status message to the contents of the container's terminationMessagePath when the container exits.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. 
[optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.image = image - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, image, *args, **kwargs): # noqa: E501 - """Container - a model defined in OpenAPI - - Args: - image (str): Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. 
When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images Possible enum values: - `\"Always\"` means that kubelet always attempts to pull the latest image. Container will fail If the pull fails. - `\"IfNotPresent\"` means that kubelet pulls if the image isn't present on disk. Container will fail if the image isn't present and the pull fails. - `\"Never\"` means that kubelet never pulls an image, but only uses a local image. Container will fail if the image isn't present. [optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.. [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Exposing a port here gives the system additional information about the network connections a container uses, but is primarily informational. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. 
If this is not set, reads from stdin in the container will always result in EOF. Default is false.. [optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. [optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated. Possible enum values: - `\"FallbackToLogsOnError\"` will read the most recent contents of the container logs for the container status message when the container exits with an error and the terminationMessagePath has no contents. 
- `\"File\"` is the default behavior and will set the container status message to the contents of the container's terminationMessagePath when the container exits.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. [optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.image = image - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/container_port.py b/sdks/python/client/argo_workflows/model/container_port.py deleted file mode 100644 index 15c182a4735c..000000000000 --- a/sdks/python/client/argo_workflows/model/container_port.py +++ /dev/null @@ -1,282 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ContainerPort(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - ('protocol',): { - 'SCTP': "SCTP", - 'TCP': "TCP", - 'UDP': "UDP", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'container_port': (int,), # noqa: E501 - 'host_ip': (str,), # noqa: E501 - 'host_port': (int,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'protocol': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'container_port': 'containerPort', # noqa: E501 - 'host_ip': 'hostIP', # noqa: E501 - 'host_port': 'hostPort', # noqa: E501 - 'name': 'name', # noqa: E501 - 'protocol': 'protocol', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, container_port, *args, **kwargs): # noqa: E501 - """ContainerPort - a model defined in OpenAPI - - Args: - container_port (int): Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - host_ip (str): What host IP to bind the external port to.. [optional] # noqa: E501 - host_port (int): Number of port to expose on the host. If specified, this must be a valid port number, 0 < x < 65536. If HostNetwork is specified, this must match ContainerPort. Most containers do not need this.. [optional] # noqa: E501 - name (str): If specified, this must be an IANA_SVC_NAME and unique within the pod. Each named port in a pod must have a unique name. Name for the port that can be referred to by services.. [optional] # noqa: E501 - protocol (str): Protocol for port. 
Must be UDP, TCP, or SCTP. Defaults to \"TCP\". Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. - `\"UDP\"` is the UDP protocol.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.container_port = container_port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, container_port, *args, **kwargs): # noqa: E501 - """ContainerPort - a model defined in OpenAPI - - Args: - container_port (int): Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - host_ip (str): What host IP to bind the external port to.. [optional] # noqa: E501 - host_port (int): Number of port to expose on the host. If specified, this must be a valid port number, 0 < x < 65536. If HostNetwork is specified, this must match ContainerPort. Most containers do not need this.. [optional] # noqa: E501 - name (str): If specified, this must be an IANA_SVC_NAME and unique within the pod. Each named port in a pod must have a unique name. Name for the port that can be referred to by services.. [optional] # noqa: E501 - protocol (str): Protocol for port. 
Must be UDP, TCP, or SCTP. Defaults to \"TCP\". Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. - `\"UDP\"` is the UDP protocol.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.container_port = container_port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/create_options.py b/sdks/python/client/argo_workflows/model/create_options.py deleted file mode 100644 index ec2bd5534cc2..000000000000 --- a/sdks/python/client/argo_workflows/model/create_options.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class CreateOptions(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'dry_run': ([str],), # noqa: E501 - 'field_manager': (str,), # noqa: E501 - 'field_validation': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'dry_run': 'dryRun', # noqa: E501 - 'field_manager': 'fieldManager', # noqa: E501 - 'field_validation': 'fieldValidation', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """CreateOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dry_run ([str]): [optional] # noqa: E501 - field_manager (str): [optional] # noqa: E501 - field_validation (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """CreateOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dry_run ([str]): [optional] # noqa: E501 - field_manager (str): [optional] # noqa: E501 - field_validation (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/csi_volume_source.py b/sdks/python/client/argo_workflows/model/csi_volume_source.py deleted file mode 100644 index cfa528787e97..000000000000 --- a/sdks/python/client/argo_workflows/model/csi_volume_source.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class CSIVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'driver': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'node_publish_secret_ref': (LocalObjectReference,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'volume_attributes': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'driver': 'driver', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'node_publish_secret_ref': 'nodePublishSecretRef', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'volume_attributes': 'volumeAttributes', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, driver, *args, **kwargs): # noqa: E501 - """CSIVolumeSource - a model defined in OpenAPI - - Args: - driver (str): Driver is the name of the CSI driver that handles this volume. 
Consult with your admin for the correct name as registered in the cluster. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Ex. \"ext4\", \"xfs\", \"ntfs\". If not provided, the empty value is passed to the associated CSI driver which will determine the default filesystem to apply.. [optional] # noqa: E501 - node_publish_secret_ref (LocalObjectReference): [optional] # noqa: E501 - read_only (bool): Specifies a read-only configuration for the volume. Defaults to false (read/write).. 
[optional] # noqa: E501 - volume_attributes ({str: (str,)}): VolumeAttributes stores driver-specific properties that are passed to the CSI driver. Consult your driver's documentation for supported values.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.driver = driver - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, driver, *args, **kwargs): # noqa: E501 - """CSIVolumeSource - a model defined in OpenAPI - - Args: - driver (str): Driver is the name of the CSI driver that handles this volume. Consult with your admin for the correct name as registered in the cluster. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Ex. \"ext4\", \"xfs\", \"ntfs\". If not provided, the empty value is passed to the associated CSI driver which will determine the default filesystem to apply.. [optional] # noqa: E501 - node_publish_secret_ref (LocalObjectReference): [optional] # noqa: E501 - read_only (bool): Specifies a read-only configuration for the volume. Defaults to false (read/write).. [optional] # noqa: E501 - volume_attributes ({str: (str,)}): VolumeAttributes stores driver-specific properties that are passed to the CSI driver. 
Consult your driver's documentation for supported values.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.driver = driver - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/downward_api_projection.py b/sdks/python/client/argo_workflows/model/downward_api_projection.py deleted file mode 100644 index 52c53a9ba72f..000000000000 --- a/sdks/python/client/argo_workflows/model/downward_api_projection.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.downward_api_volume_file import DownwardAPIVolumeFile - globals()['DownwardAPIVolumeFile'] = DownwardAPIVolumeFile - - -class DownwardAPIProjection(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'items': ([DownwardAPIVolumeFile],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """DownwardAPIProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([DownwardAPIVolumeFile]): Items is a list of DownwardAPIVolume file. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """DownwardAPIProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([DownwardAPIVolumeFile]): Items is a list of DownwardAPIVolume file. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/downward_api_volume_file.py b/sdks/python/client/argo_workflows/model/downward_api_volume_file.py deleted file mode 100644 index 0ff6d17ee505..000000000000 --- a/sdks/python/client/argo_workflows/model/downward_api_volume_file.py +++ /dev/null @@ -1,281 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.object_field_selector import ObjectFieldSelector - from argo_workflows.model.resource_field_selector import ResourceFieldSelector - globals()['ObjectFieldSelector'] = ObjectFieldSelector - globals()['ResourceFieldSelector'] = ResourceFieldSelector - - -class DownwardAPIVolumeFile(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'path': (str,), # noqa: E501 - 'field_ref': (ObjectFieldSelector,), # noqa: E501 - 'mode': (int,), # noqa: E501 - 'resource_field_ref': (ResourceFieldSelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'path': 'path', # noqa: E501 - 'field_ref': 'fieldRef', # noqa: E501 - 'mode': 'mode', # noqa: E501 - 'resource_field_ref': 'resourceFieldRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, path, *args, **kwargs): # noqa: E501 - """DownwardAPIVolumeFile - a model defined in OpenAPI - - Args: - path (str): Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..' - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - field_ref (ObjectFieldSelector): [optional] # noqa: E501 - mode (int): Optional: mode bits used to set permissions on this file, must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. 
[optional] # noqa: E501 - resource_field_ref (ResourceFieldSelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, path, *args, **kwargs): # noqa: E501 - """DownwardAPIVolumeFile - a model defined in OpenAPI - - Args: - path (str): Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..' - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - field_ref (ObjectFieldSelector): [optional] # noqa: E501 - mode (int): Optional: mode bits used to set permissions on this file, must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. 
[optional] # noqa: E501 - resource_field_ref (ResourceFieldSelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/downward_api_volume_source.py b/sdks/python/client/argo_workflows/model/downward_api_volume_source.py deleted file mode 100644 index 2254b6392fe0..000000000000 --- a/sdks/python/client/argo_workflows/model/downward_api_volume_source.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.downward_api_volume_file import DownwardAPIVolumeFile - globals()['DownwardAPIVolumeFile'] = DownwardAPIVolumeFile - - -class DownwardAPIVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'default_mode': (int,), # noqa: E501 - 'items': ([DownwardAPIVolumeFile],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'default_mode': 'defaultMode', # noqa: E501 - 'items': 'items', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """DownwardAPIVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Optional: mode bits to use on created files by default. Must be a Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - items ([DownwardAPIVolumeFile]): Items is a list of downward API volume file. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """DownwardAPIVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Optional: mode bits to use on created files by default. Must be a Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - items ([DownwardAPIVolumeFile]): Items is a list of downward API volume file. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/duration.py b/sdks/python/client/argo_workflows/model/duration.py deleted file mode 100644 index 624f4e17cad6..000000000000 --- a/sdks/python/client/argo_workflows/model/duration.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class Duration(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'duration': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'duration': 'duration', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """Duration - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """Duration - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/empty_dir_volume_source.py b/sdks/python/client/argo_workflows/model/empty_dir_volume_source.py deleted file mode 100644 index e8ccff6a2235..000000000000 --- a/sdks/python/client/argo_workflows/model/empty_dir_volume_source.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class EmptyDirVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'medium': (str,), # noqa: E501 - 'size_limit': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'medium': 'medium', # noqa: E501 - 'size_limit': 'sizeLimit', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EmptyDirVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - medium (str): What type of storage medium should back this directory. The default is \"\" which means to use the node's default medium. Must be an empty string (default) or Memory. More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir. [optional] # noqa: E501 - size_limit (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. 
When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EmptyDirVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - medium (str): What type of storage medium should back this directory. The default is \"\" which means to use the node's default medium. Must be an empty string (default) or Memory. More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir. [optional] # noqa: E501 - size_limit (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". 
This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/env_from_source.py b/sdks/python/client/argo_workflows/model/env_from_source.py deleted file mode 100644 index 01f02d608447..000000000000 --- a/sdks/python/client/argo_workflows/model/env_from_source.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_env_source import ConfigMapEnvSource - from argo_workflows.model.secret_env_source import SecretEnvSource - globals()['ConfigMapEnvSource'] = ConfigMapEnvSource - globals()['SecretEnvSource'] = SecretEnvSource - - -class EnvFromSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'config_map_ref': (ConfigMapEnvSource,), # noqa: E501 - 'prefix': (str,), # noqa: E501 - 'secret_ref': (SecretEnvSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map_ref': 'configMapRef', # noqa: E501 - 'prefix': 'prefix', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EnvFromSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_ref (ConfigMapEnvSource): [optional] # noqa: E501 - prefix (str): An optional identifier to prepend to each key in the ConfigMap. Must be a C_IDENTIFIER.. [optional] # noqa: E501 - secret_ref (SecretEnvSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EnvFromSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_ref (ConfigMapEnvSource): [optional] # noqa: E501 - prefix (str): An optional identifier to prepend to each key in the ConfigMap. Must be a C_IDENTIFIER.. 
[optional] # noqa: E501 - secret_ref (SecretEnvSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/env_var.py b/sdks/python/client/argo_workflows/model/env_var.py deleted file mode 100644 index a46cde40f49c..000000000000 --- a/sdks/python/client/argo_workflows/model/env_var.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.env_var_source import EnvVarSource - globals()['EnvVarSource'] = EnvVarSource - - -class EnvVar(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - 'value_from': (EnvVarSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value': 'value', # noqa: E501 - 'value_from': 'valueFrom', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """EnvVar - a model defined in OpenAPI - - Args: - name (str): Name of the environment variable. Must be a C_IDENTIFIER. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): Variable references $(VAR_NAME) are expanded using the previously defined environment variables in the container and any service environment variables. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to \"\".. [optional] # noqa: E501 - value_from (EnvVarSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """EnvVar - a model defined in OpenAPI - - Args: - name (str): Name of the environment variable. Must be a C_IDENTIFIER. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): Variable references $(VAR_NAME) are expanded using the previously defined environment variables in the container and any service environment variables. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to \"\".. [optional] # noqa: E501 - value_from (EnvVarSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/env_var_source.py b/sdks/python/client/argo_workflows/model/env_var_source.py deleted file mode 100644 index 8d728eeabd23..000000000000 --- a/sdks/python/client/argo_workflows/model/env_var_source.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.object_field_selector import ObjectFieldSelector - from argo_workflows.model.resource_field_selector import ResourceFieldSelector - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['ObjectFieldSelector'] = ObjectFieldSelector - globals()['ResourceFieldSelector'] = ResourceFieldSelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class EnvVarSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'config_map_key_ref': (ConfigMapKeySelector,), # noqa: E501 - 'field_ref': (ObjectFieldSelector,), # noqa: E501 - 'resource_field_ref': (ResourceFieldSelector,), # noqa: E501 - 'secret_key_ref': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map_key_ref': 'configMapKeyRef', # noqa: E501 - 'field_ref': 'fieldRef', # noqa: E501 - 'resource_field_ref': 'resourceFieldRef', # noqa: E501 - 'secret_key_ref': 'secretKeyRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EnvVarSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - field_ref (ObjectFieldSelector): [optional] # noqa: E501 - resource_field_ref (ResourceFieldSelector): [optional] # noqa: E501 - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EnvVarSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - field_ref (ObjectFieldSelector): [optional] # noqa: E501 - resource_field_ref (ResourceFieldSelector): [optional] # noqa: E501 - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/ephemeral_volume_source.py b/sdks/python/client/argo_workflows/model/ephemeral_volume_source.py deleted file mode 100644 index 0e84bcc4eeba..000000000000 --- a/sdks/python/client/argo_workflows/model/ephemeral_volume_source.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.persistent_volume_claim_template import PersistentVolumeClaimTemplate - globals()['PersistentVolumeClaimTemplate'] = PersistentVolumeClaimTemplate - - -class EphemeralVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'volume_claim_template': (PersistentVolumeClaimTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'volume_claim_template': 'volumeClaimTemplate', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EphemeralVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - volume_claim_template (PersistentVolumeClaimTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EphemeralVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - volume_claim_template (PersistentVolumeClaimTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/event.py b/sdks/python/client/argo_workflows/model/event.py deleted file mode 100644 index a140a3c29827..000000000000 --- a/sdks/python/client/argo_workflows/model/event.py +++ /dev/null @@ -1,339 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.event_series import EventSeries - from argo_workflows.model.event_source import EventSource - from argo_workflows.model.object_meta import ObjectMeta - from argo_workflows.model.object_reference import ObjectReference - globals()['EventSeries'] = EventSeries - globals()['EventSource'] = EventSource - globals()['ObjectMeta'] = ObjectMeta - globals()['ObjectReference'] = ObjectReference - - -class Event(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'involved_object': (ObjectReference,), # noqa: E501 - 'metadata': (ObjectMeta,), # noqa: E501 - 'action': (str,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'count': (int,), # noqa: E501 - 'event_time': (datetime,), # noqa: E501 - 'first_timestamp': (datetime,), # noqa: E501 - 'kind': (str,), # noqa: E501 - 'last_timestamp': (datetime,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'reason': (str,), # noqa: E501 - 'related': (ObjectReference,), # noqa: E501 - 'reporting_component': (str,), # noqa: E501 - 'reporting_instance': (str,), # noqa: E501 - 'series': (EventSeries,), # noqa: E501 - 'source': (EventSource,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'involved_object': 'involvedObject', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'action': 'action', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'count': 'count', # noqa: E501 - 'event_time': 'eventTime', # noqa: E501 - 'first_timestamp': 'firstTimestamp', # noqa: E501 - 'kind': 'kind', # noqa: E501 - 'last_timestamp': 'lastTimestamp', # noqa: E501 - 'message': 'message', # noqa: E501 - 'reason': 'reason', # noqa: E501 - 'related': 'related', # noqa: E501 - 'reporting_component': 'reportingComponent', # noqa: E501 - 'reporting_instance': 'reportingInstance', # noqa: E501 - 'series': 'series', # noqa: E501 - 'source': 'source', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, involved_object, metadata, *args, **kwargs): # noqa: E501 - """Event - a model defined in OpenAPI - - Args: - involved_object (ObjectReference): - metadata (ObjectMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - action (str): What action was taken/failed regarding to the Regarding object.. [optional] # noqa: E501 - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - count (int): The number of times this event has occurred.. [optional] # noqa: E501 - event_time (datetime): MicroTime is version of Time with microsecond level precision.. [optional] # noqa: E501 - first_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - last_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): A human-readable description of the status of this operation.. [optional] # noqa: E501 - reason (str): This should be a short, machine understandable string that gives the reason for the transition into the object's current status.. [optional] # noqa: E501 - related (ObjectReference): [optional] # noqa: E501 - reporting_component (str): Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.. [optional] # noqa: E501 - reporting_instance (str): ID of the controller instance, e.g. `kubelet-xyzf`.. [optional] # noqa: E501 - series (EventSeries): [optional] # noqa: E501 - source (EventSource): [optional] # noqa: E501 - type (str): Type of this event (Normal, Warning), new types could be added in the future. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.involved_object = involved_object - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, involved_object, metadata, *args, **kwargs): # noqa: E501 - """Event - a model defined in OpenAPI - - Args: - involved_object (ObjectReference): - metadata (ObjectMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - action (str): What action was taken/failed regarding to the Regarding object.. [optional] # noqa: E501 - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - count (int): The number of times this event has occurred.. [optional] # noqa: E501 - event_time (datetime): MicroTime is version of Time with microsecond level precision.. [optional] # noqa: E501 - first_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - last_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): A human-readable description of the status of this operation.. [optional] # noqa: E501 - reason (str): This should be a short, machine understandable string that gives the reason for the transition into the object's current status.. [optional] # noqa: E501 - related (ObjectReference): [optional] # noqa: E501 - reporting_component (str): Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.. [optional] # noqa: E501 - reporting_instance (str): ID of the controller instance, e.g. `kubelet-xyzf`.. [optional] # noqa: E501 - series (EventSeries): [optional] # noqa: E501 - source (EventSource): [optional] # noqa: E501 - type (str): Type of this event (Normal, Warning), new types could be added in the future. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.involved_object = involved_object - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/event_series.py b/sdks/python/client/argo_workflows/model/event_series.py deleted file mode 100644 index 03b18074310e..000000000000 --- a/sdks/python/client/argo_workflows/model/event_series.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class EventSeries(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'count': (int,), # noqa: E501 - 'last_observed_time': (datetime,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'count': 'count', # noqa: E501 - 'last_observed_time': 'lastObservedTime', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EventSeries - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - count (int): Number of occurrences in this series up to the last heartbeat time. [optional] # noqa: E501 - last_observed_time (datetime): MicroTime is version of Time with microsecond level precision.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EventSeries - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - count (int): Number of occurrences in this series up to the last heartbeat time. [optional] # noqa: E501 - last_observed_time (datetime): MicroTime is version of Time with microsecond level precision.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/event_source.py b/sdks/python/client/argo_workflows/model/event_source.py deleted file mode 100644 index 75a47b809300..000000000000 --- a/sdks/python/client/argo_workflows/model/event_source.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class EventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'component': (str,), # noqa: E501 - 'host': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'component': 'component', # noqa: E501 - 'host': 'host', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - component (str): Component from which the event is generated.. [optional] # noqa: E501 - host (str): Node name on which the event is generated.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - component (str): Component from which the event is generated.. [optional] # noqa: E501 - host (str): Node name on which the event is generated.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py b/sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py deleted file mode 100644 index 83af421a7a84..000000000000 --- a/sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource - - -class EventsourceCreateEventSourceRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'event_source': (IoArgoprojEventsV1alpha1EventSource,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'event_source': 'eventSource', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EventsourceCreateEventSourceRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_source (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EventsourceCreateEventSourceRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_source (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py b/sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py deleted file mode 100644 index e0bb4c50191b..000000000000 --- a/sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource - - -class EventsourceEventSourceWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'object': (IoArgoprojEventsV1alpha1EventSource,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'object': 'object', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EventsourceEventSourceWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EventsourceEventSourceWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/eventsource_log_entry.py b/sdks/python/client/argo_workflows/model/eventsource_log_entry.py deleted file mode 100644 index 5201eeb6140c..000000000000 --- a/sdks/python/client/argo_workflows/model/eventsource_log_entry.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class EventsourceLogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'event_name': (str,), # noqa: E501 - 'event_source_name': (str,), # noqa: E501 - 'event_source_type': (str,), # noqa: E501 - 'level': (str,), # noqa: E501 - 'msg': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'time': (datetime,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'event_name': 'eventName', # noqa: E501 - 'event_source_name': 'eventSourceName', # noqa: E501 - 'event_source_type': 'eventSourceType', # noqa: E501 - 'level': 'level', # noqa: E501 - 'msg': 'msg', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'time': 'time', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EventsourceLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_name (str): [optional] # noqa: E501 - event_source_name (str): [optional] # noqa: E501 - event_source_type (str): [optional] # noqa: E501 - level (str): [optional] # noqa: E501 - msg (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EventsourceLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_name (str): [optional] # noqa: E501 - event_source_name (str): [optional] # noqa: E501 - event_source_type (str): [optional] # noqa: E501 - level (str): [optional] # noqa: E501 - msg (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py b/sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py deleted file mode 100644 index 1f00975b5e79..000000000000 --- a/sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource - - -class EventsourceUpdateEventSourceRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'event_source': (IoArgoprojEventsV1alpha1EventSource,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'event_source': 'eventSource', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """EventsourceUpdateEventSourceRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_source (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """EventsourceUpdateEventSourceRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_source (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/exec_action.py b/sdks/python/client/argo_workflows/model/exec_action.py deleted file mode 100644 index 5b9c49d58015..000000000000 --- a/sdks/python/client/argo_workflows/model/exec_action.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ExecAction(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'command': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'command': 'command', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ExecAction - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - command ([str]): Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ExecAction - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - command ([str]): Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/fc_volume_source.py b/sdks/python/client/argo_workflows/model/fc_volume_source.py deleted file mode 100644 index 1c87fc425e23..000000000000 --- a/sdks/python/client/argo_workflows/model/fc_volume_source.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class FCVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'fs_type': (str,), # noqa: E501 - 'lun': (int,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'target_wwns': ([str],), # noqa: E501 - 'wwids': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'fs_type': 'fsType', # noqa: E501 - 'lun': 'lun', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'target_wwns': 'targetWWNs', # noqa: E501 - 'wwids': 'wwids', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """FCVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - lun (int): Optional: FC target lun number. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - target_wwns ([str]): Optional: FC target worldwide names (WWNs). [optional] # noqa: E501 - wwids ([str]): Optional: FC volume world wide identifiers (wwids) Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """FCVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - lun (int): Optional: FC target lun number. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - target_wwns ([str]): Optional: FC target worldwide names (WWNs). [optional] # noqa: E501 - wwids ([str]): Optional: FC volume world wide identifiers (wwids) Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/flex_volume_source.py b/sdks/python/client/argo_workflows/model/flex_volume_source.py deleted file mode 100644 index 910e2cb85a23..000000000000 --- a/sdks/python/client/argo_workflows/model/flex_volume_source.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class FlexVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'driver': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'options': ({str: (str,)},), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'secret_ref': (LocalObjectReference,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'driver': 'driver', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'options': 'options', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, driver, *args, **kwargs): # noqa: E501 - """FlexVolumeSource - a model defined in OpenAPI - - Args: - driver (str): Driver is the name of the driver to use for this volume. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". The default filesystem depends on FlexVolume script.. [optional] # noqa: E501 - options ({str: (str,)}): Optional: Extra command options if any.. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.driver = driver - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, driver, *args, **kwargs): # noqa: E501 - """FlexVolumeSource - a model defined in OpenAPI - - Args: - driver (str): Driver is the name of the driver to use for this volume. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". The default filesystem depends on FlexVolume script.. [optional] # noqa: E501 - options ({str: (str,)}): Optional: Extra command options if any.. [optional] # noqa: E501 - read_only (bool): Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.driver = driver - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/flocker_volume_source.py b/sdks/python/client/argo_workflows/model/flocker_volume_source.py deleted file mode 100644 index a362f844f36e..000000000000 --- a/sdks/python/client/argo_workflows/model/flocker_volume_source.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class FlockerVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'dataset_name': (str,), # noqa: E501 - 'dataset_uuid': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'dataset_name': 'datasetName', # noqa: E501 - 'dataset_uuid': 'datasetUUID', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """FlockerVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dataset_name (str): Name of the dataset stored as metadata -> name on the dataset for Flocker should be considered as deprecated. [optional] # noqa: E501 - dataset_uuid (str): UUID of the dataset. This is unique identifier of a Flocker dataset. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """FlockerVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dataset_name (str): Name of the dataset stored as metadata -> name on the dataset for Flocker should be considered as deprecated. [optional] # noqa: E501 - dataset_uuid (str): UUID of the dataset. This is unique identifier of a Flocker dataset. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/gce_persistent_disk_volume_source.py b/sdks/python/client/argo_workflows/model/gce_persistent_disk_volume_source.py deleted file mode 100644 index 46f9a5b1d50d..000000000000 --- a/sdks/python/client/argo_workflows/model/gce_persistent_disk_volume_source.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class GCEPersistentDiskVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'pd_name': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'partition': (int,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'pd_name': 'pdName', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'partition': 'partition', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, pd_name, *args, **kwargs): # noqa: E501 - """GCEPersistentDiskVolumeSource - a model defined in OpenAPI - - Args: - pd_name (str): Unique name of the PD resource in GCE. Used to identify the disk in GCE. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk. [optional] # noqa: E501 - partition (int): The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty). More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk. 
[optional] # noqa: E501 - read_only (bool): ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.pd_name = pd_name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, pd_name, *args, **kwargs): # noqa: E501 - """GCEPersistentDiskVolumeSource - a model defined in OpenAPI - - Args: - pd_name (str): Unique name of the PD resource in GCE. Used to identify the disk in GCE. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk. [optional] # noqa: E501 - partition (int): The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. 
Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty). More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk. [optional] # noqa: E501 - read_only (bool): ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.pd_name = pd_name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/git_repo_volume_source.py b/sdks/python/client/argo_workflows/model/git_repo_volume_source.py deleted file mode 100644 index ad898421a0fc..000000000000 --- a/sdks/python/client/argo_workflows/model/git_repo_volume_source.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class GitRepoVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'repository': (str,), # noqa: E501 - 'directory': (str,), # noqa: E501 - 'revision': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'repository': 'repository', # noqa: E501 - 'directory': 'directory', # noqa: E501 - 'revision': 'revision', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, repository, *args, **kwargs): # noqa: E501 - """GitRepoVolumeSource - a model defined in OpenAPI - - Args: - repository (str): Repository URL - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - directory (str): Target directory name. Must not contain or start with '..'. If '.' is supplied, the volume directory will be the git repository. Otherwise, if specified, the volume will contain the git repository in the subdirectory with the given name.. [optional] # noqa: E501 - revision (str): Commit hash for the specified revision.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.repository = repository - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, repository, *args, **kwargs): # noqa: E501 - """GitRepoVolumeSource - a model defined in OpenAPI - - Args: - repository (str): Repository URL - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - directory (str): Target directory name. Must not contain or start with '..'. If '.' is supplied, the volume directory will be the git repository. Otherwise, if specified, the volume will contain the git repository in the subdirectory with the given name.. [optional] # noqa: E501 - revision (str): Commit hash for the specified revision.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.repository = repository - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/glusterfs_volume_source.py b/sdks/python/client/argo_workflows/model/glusterfs_volume_source.py deleted file mode 100644 index 6d39843cc0b1..000000000000 --- a/sdks/python/client/argo_workflows/model/glusterfs_volume_source.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class GlusterfsVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'endpoints': (str,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'endpoints': 'endpoints', # noqa: E501 - 'path': 'path', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, endpoints, path, *args, **kwargs): # noqa: E501 - """GlusterfsVolumeSource - a model defined in OpenAPI - - Args: - endpoints (str): EndpointsName is the endpoint name that details Glusterfs topology. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod - path (str): Path is the Glusterfs volume path. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): ReadOnly here will force the Glusterfs volume to be mounted with read-only permissions. Defaults to false. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.endpoints = endpoints - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, endpoints, path, *args, **kwargs): # noqa: E501 - """GlusterfsVolumeSource - a model defined in OpenAPI - - Args: - endpoints (str): EndpointsName is the endpoint name that details Glusterfs topology. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod - path (str): Path is the Glusterfs volume path. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): ReadOnly here will force the Glusterfs volume to be mounted with read-only permissions. Defaults to false. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.endpoints = endpoints - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/google_protobuf_any.py b/sdks/python/client/argo_workflows/model/google_protobuf_any.py deleted file mode 100644 index dd11635fc9de..000000000000 --- a/sdks/python/client/argo_workflows/model/google_protobuf_any.py +++ /dev/null @@ -1,264 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class GoogleProtobufAny(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - ('value',): { - 'regex': { - 'pattern': r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', # noqa: E501 - }, - }, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'type_url': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'type_url': 'type_url', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GoogleProtobufAny - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - type_url (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GoogleProtobufAny - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - type_url (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/group_version_resource.py b/sdks/python/client/argo_workflows/model/group_version_resource.py deleted file mode 100644 index edb3e16bb6bd..000000000000 --- a/sdks/python/client/argo_workflows/model/group_version_resource.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class GroupVersionResource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'group': (str,), # noqa: E501 - 'resource': (str,), # noqa: E501 - 'version': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'group': 'group', # noqa: E501 - 'resource': 'resource', # noqa: E501 - 'version': 'version', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GroupVersionResource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - group (str): [optional] # noqa: E501 - resource (str): [optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GroupVersionResource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - group (str): [optional] # noqa: E501 - resource (str): [optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/grpc_action.py b/sdks/python/client/argo_workflows/model/grpc_action.py deleted file mode 100644 index 08d0be473b18..000000000000 --- a/sdks/python/client/argo_workflows/model/grpc_action.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class GRPCAction(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'port': (int,), # noqa: E501 - 'service': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'port': 'port', # noqa: E501 - 'service': 'service', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, port, *args, **kwargs): # noqa: E501 - """GRPCAction - a model defined in OpenAPI - - Args: - port (int): Port number of the gRPC service. Number must be in the range 1 to 65535. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - service (str): Service is the name of the service to place in the gRPC HealthCheckRequest (see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). If this is not specified, the default behavior is defined by gRPC.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, port, *args, **kwargs): # noqa: E501 - """GRPCAction - a model defined in OpenAPI - - Args: - port (int): Port number of the gRPC service. Number must be in the range 1 to 65535. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - service (str): Service is the name of the service to place in the gRPC HealthCheckRequest (see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). If this is not specified, the default behavior is defined by gRPC.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/grpc_gateway_runtime_error.py b/sdks/python/client/argo_workflows/model/grpc_gateway_runtime_error.py deleted file mode 100644 index 9f3fd97cfa95..000000000000 --- a/sdks/python/client/argo_workflows/model/grpc_gateway_runtime_error.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.google_protobuf_any import GoogleProtobufAny - globals()['GoogleProtobufAny'] = GoogleProtobufAny - - -class GrpcGatewayRuntimeError(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'code': (int,), # noqa: E501 - 'details': ([GoogleProtobufAny],), # noqa: E501 - 'error': (str,), # noqa: E501 - 'message': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'code': 'code', # noqa: E501 - 'details': 'details', # noqa: E501 - 'error': 'error', # noqa: E501 - 'message': 'message', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GrpcGatewayRuntimeError - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - code (int): [optional] # noqa: E501 - details ([GoogleProtobufAny]): [optional] # noqa: E501 - error (str): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GrpcGatewayRuntimeError - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - code (int): [optional] # noqa: E501 - details ([GoogleProtobufAny]): [optional] # noqa: E501 - error (str): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/grpc_gateway_runtime_stream_error.py b/sdks/python/client/argo_workflows/model/grpc_gateway_runtime_stream_error.py deleted file mode 100644 index 99b82d1bf108..000000000000 --- a/sdks/python/client/argo_workflows/model/grpc_gateway_runtime_stream_error.py +++ /dev/null @@ -1,277 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.google_protobuf_any import GoogleProtobufAny - globals()['GoogleProtobufAny'] = GoogleProtobufAny - - -class GrpcGatewayRuntimeStreamError(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'details': ([GoogleProtobufAny],), # noqa: E501 - 'grpc_code': (int,), # noqa: E501 - 'http_code': (int,), # noqa: E501 - 'http_status': (str,), # noqa: E501 - 'message': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'details': 'details', # noqa: E501 - 'grpc_code': 'grpc_code', # noqa: E501 - 'http_code': 'http_code', # noqa: E501 - 'http_status': 'http_status', # noqa: E501 - 'message': 'message', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """GrpcGatewayRuntimeStreamError - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - details ([GoogleProtobufAny]): [optional] # noqa: E501 - grpc_code (int): [optional] # noqa: E501 - http_code (int): [optional] # noqa: E501 - http_status (str): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """GrpcGatewayRuntimeStreamError - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - details ([GoogleProtobufAny]): [optional] # noqa: E501 - grpc_code (int): [optional] # noqa: E501 - http_code (int): [optional] # noqa: E501 - http_status (str): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/host_alias.py b/sdks/python/client/argo_workflows/model/host_alias.py deleted file mode 100644 index 7d1640053212..000000000000 --- a/sdks/python/client/argo_workflows/model/host_alias.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class HostAlias(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'hostnames': ([str],), # noqa: E501 - 'ip': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'hostnames': 'hostnames', # noqa: E501 - 'ip': 'ip', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """HostAlias - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - hostnames ([str]): Hostnames for the above IP address.. [optional] # noqa: E501 - ip (str): IP address of the host file entry.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """HostAlias - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - hostnames ([str]): Hostnames for the above IP address.. [optional] # noqa: E501 - ip (str): IP address of the host file entry.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/host_path_volume_source.py b/sdks/python/client/argo_workflows/model/host_path_volume_source.py deleted file mode 100644 index 6f9191ddf18c..000000000000 --- a/sdks/python/client/argo_workflows/model/host_path_volume_source.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class HostPathVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'path': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'path': 'path', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, path, *args, **kwargs): # noqa: E501 - """HostPathVolumeSource - a model defined in OpenAPI - - Args: - path (str): Path of the directory on the host. If the path is a symlink, it will follow the link to the real path. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - type (str): Type for HostPath Volume Defaults to \"\" More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, path, *args, **kwargs): # noqa: E501 - """HostPathVolumeSource - a model defined in OpenAPI - - Args: - path (str): Path of the directory on the host. If the path is a symlink, it will follow the link to the real path. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - type (str): Type for HostPath Volume Defaults to \"\" More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/http_get_action.py b/sdks/python/client/argo_workflows/model/http_get_action.py deleted file mode 100644 index 86a125d711f6..000000000000 --- a/sdks/python/client/argo_workflows/model/http_get_action.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.http_header import HTTPHeader - globals()['HTTPHeader'] = HTTPHeader - - -class HTTPGetAction(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - ('scheme',): { - 'HTTP': "HTTP", - 'HTTPS': "HTTPS", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'port': (str,), # noqa: E501 - 'host': (str,), # noqa: E501 - 'http_headers': ([HTTPHeader],), # noqa: E501 - 'path': (str,), # noqa: E501 - 'scheme': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'port': 'port', # noqa: E501 - 'host': 'host', # noqa: E501 - 'http_headers': 'httpHeaders', # noqa: E501 - 'path': 'path', # noqa: E501 - 'scheme': 'scheme', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, port, *args, **kwargs): # noqa: E501 - """HTTPGetAction - a model defined in OpenAPI - - Args: - port (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - host (str): Host name to connect to, defaults to the pod IP. You probably want to set \"Host\" in httpHeaders instead.. [optional] # noqa: E501 - http_headers ([HTTPHeader]): Custom headers to set in the request. HTTP allows repeated headers.. [optional] # noqa: E501 - path (str): Path to access on the HTTP server.. [optional] # noqa: E501 - scheme (str): Scheme to use for connecting to the host. Defaults to HTTP. Possible enum values: - `\"HTTP\"` means that the scheme used will be http:// - `\"HTTPS\"` means that the scheme used will be https://. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, port, *args, **kwargs): # noqa: E501 - """HTTPGetAction - a model defined in OpenAPI - - Args: - port (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - host (str): Host name to connect to, defaults to the pod IP. You probably want to set \"Host\" in httpHeaders instead.. 
[optional] # noqa: E501 - http_headers ([HTTPHeader]): Custom headers to set in the request. HTTP allows repeated headers.. [optional] # noqa: E501 - path (str): Path to access on the HTTP server.. [optional] # noqa: E501 - scheme (str): Scheme to use for connecting to the host. Defaults to HTTP. Possible enum values: - `\"HTTP\"` means that the scheme used will be http:// - `\"HTTPS\"` means that the scheme used will be https://. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/http_header.py b/sdks/python/client/argo_workflows/model/http_header.py deleted file mode 100644 index de7ff70e9a83..000000000000 --- a/sdks/python/client/argo_workflows/model/http_header.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class HTTPHeader(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, value, *args, **kwargs): # noqa: E501 - """HTTPHeader - a model defined in OpenAPI - - Args: - name (str): The header field name - value (str): The header field value - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, value, *args, **kwargs): # noqa: E501 - """HTTPHeader - a model defined in OpenAPI - - Args: - name (str): The header field name - value (str): The header field value - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amount.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amount.py deleted file mode 100644 index 6cb7bbe57499..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amount.py +++ /dev/null @@ -1,260 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1Amount(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - ('value',): { - 'regex': { - 'pattern': r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', # noqa: E501 - }, - }, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Amount - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Amount - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_consume_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_consume_config.py deleted file mode 100644 index eaa7dd8acc3f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_consume_config.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1AMQPConsumeConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'auto_ack': (bool,), # noqa: E501 - 'consumer_tag': (str,), # noqa: E501 - 'exclusive': (bool,), # noqa: E501 - 'no_local': (bool,), # noqa: E501 - 'no_wait': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auto_ack': 'autoAck', # noqa: E501 - 'consumer_tag': 'consumerTag', # noqa: E501 - 'exclusive': 'exclusive', # noqa: E501 - 'no_local': 'noLocal', # noqa: E501 - 'no_wait': 'noWait', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPConsumeConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auto_ack (bool): [optional] # noqa: E501 - consumer_tag (str): [optional] # noqa: E501 - exclusive (bool): [optional] # noqa: E501 - no_local (bool): [optional] # noqa: E501 - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPConsumeConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auto_ack (bool): [optional] # noqa: E501 - consumer_tag (str): [optional] # noqa: E501 - exclusive (bool): [optional] # noqa: E501 - no_local (bool): [optional] # noqa: E501 - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_event_source.py deleted file mode 100644 index c48740be319f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_event_source.py +++ /dev/null @@ -1,333 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_consume_config import IoArgoprojEventsV1alpha1AMQPConsumeConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_bind_config import IoArgoprojEventsV1alpha1AMQPQueueBindConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from 
argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1AMQPConsumeConfig'] = IoArgoprojEventsV1alpha1AMQPConsumeConfig - globals()['IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig'] = IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - globals()['IoArgoprojEventsV1alpha1AMQPQueueBindConfig'] = IoArgoprojEventsV1alpha1AMQPQueueBindConfig - globals()['IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig'] = IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1AMQPEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'consume': (IoArgoprojEventsV1alpha1AMQPConsumeConfig,), # noqa: E501 - 'exchange_declare': (IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig,), # noqa: E501 - 'exchange_name': (str,), # noqa: E501 - 'exchange_type': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'queue_bind': (IoArgoprojEventsV1alpha1AMQPQueueBindConfig,), # noqa: E501 - 'queue_declare': (IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig,), # noqa: E501 - 'routing_key': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'url': (str,), # noqa: E501 - 'url_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # 
noqa: E501 - 'consume': 'consume', # noqa: E501 - 'exchange_declare': 'exchangeDeclare', # noqa: E501 - 'exchange_name': 'exchangeName', # noqa: E501 - 'exchange_type': 'exchangeType', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'queue_bind': 'queueBind', # noqa: E501 - 'queue_declare': 'queueDeclare', # noqa: E501 - 'routing_key': 'routingKey', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'url': 'url', # noqa: E501 - 'url_secret': 'urlSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consume (IoArgoprojEventsV1alpha1AMQPConsumeConfig): [optional] # noqa: E501 - exchange_declare (IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig): [optional] # noqa: E501 - exchange_name (str): [optional] # noqa: E501 - exchange_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue_bind (IoArgoprojEventsV1alpha1AMQPQueueBindConfig): [optional] # noqa: E501 - queue_declare (IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig): [optional] # noqa: E501 - routing_key (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - url_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consume (IoArgoprojEventsV1alpha1AMQPConsumeConfig): [optional] # noqa: E501 - exchange_declare (IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig): [optional] # noqa: E501 - exchange_name (str): [optional] # noqa: E501 - exchange_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue_bind (IoArgoprojEventsV1alpha1AMQPQueueBindConfig): [optional] # noqa: E501 - queue_declare (IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig): [optional] # noqa: E501 - routing_key (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - url_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py deleted file mode 100644 index 78e154206e11..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'auto_delete': (bool,), # noqa: E501 - 'durable': (bool,), # noqa: E501 - 'internal': (bool,), # noqa: E501 - 'no_wait': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auto_delete': 'autoDelete', # noqa: E501 - 'durable': 'durable', # noqa: E501 - 'internal': 'internal', # noqa: E501 - 'no_wait': 'noWait', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auto_delete (bool): [optional] # noqa: E501 - durable (bool): [optional] # noqa: E501 - internal (bool): [optional] # noqa: E501 - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auto_delete (bool): [optional] # noqa: E501 - durable (bool): [optional] # noqa: E501 - internal (bool): [optional] # noqa: E501 - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py deleted file mode 100644 index 23675a1c82ed..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1AMQPQueueBindConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'no_wait': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'no_wait': 'noWait', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueBindConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueBindConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py deleted file mode 100644 index 589529111ca7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'arguments': (str,), # noqa: E501 - 'auto_delete': (bool,), # noqa: E501 - 'durable': (bool,), # noqa: E501 - 'exclusive': (bool,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'no_wait': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'arguments': 'arguments', # noqa: E501 - 'auto_delete': 'autoDelete', # noqa: E501 - 'durable': 'durable', # noqa: E501 - 'exclusive': 'exclusive', # noqa: E501 - 'name': 'name', # noqa: E501 - 'no_wait': 'noWait', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (str): [optional] # noqa: E501 - auto_delete (bool): [optional] # noqa: E501 - durable (bool): [optional] # noqa: E501 - exclusive (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (str): [optional] # noqa: E501 - auto_delete (bool): [optional] # noqa: E501 - durable (bool): [optional] # noqa: E501 - exclusive (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - no_wait (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_argo_workflow_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_argo_workflow_trigger.py deleted file mode 100644 index 3658a3e4e57c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_argo_workflow_trigger.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1ArtifactLocation'] = IoArgoprojEventsV1alpha1ArtifactLocation - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - - -class IoArgoprojEventsV1alpha1ArgoWorkflowTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'args': ([str],), # noqa: E501 - 'operation': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'source': (IoArgoprojEventsV1alpha1ArtifactLocation,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'args': 'args', # noqa: E501 - 'operation': 'operation', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'source': 'source', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): [optional] # noqa: E501 - operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): [optional] # noqa: E501 - operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_artifact_location.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_artifact_location.py deleted file mode 100644 index 1f2c2e946883..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_artifact_location.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.io_argoproj_events_v1alpha1_file_artifact import IoArgoprojEventsV1alpha1FileArtifact - from argo_workflows.model.io_argoproj_events_v1alpha1_git_artifact import IoArgoprojEventsV1alpha1GitArtifact - from argo_workflows.model.io_argoproj_events_v1alpha1_resource import IoArgoprojEventsV1alpha1Resource - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact - from argo_workflows.model.io_argoproj_events_v1alpha1_url_artifact import IoArgoprojEventsV1alpha1URLArtifact - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['IoArgoprojEventsV1alpha1FileArtifact'] = 
IoArgoprojEventsV1alpha1FileArtifact - globals()['IoArgoprojEventsV1alpha1GitArtifact'] = IoArgoprojEventsV1alpha1GitArtifact - globals()['IoArgoprojEventsV1alpha1Resource'] = IoArgoprojEventsV1alpha1Resource - globals()['IoArgoprojEventsV1alpha1S3Artifact'] = IoArgoprojEventsV1alpha1S3Artifact - globals()['IoArgoprojEventsV1alpha1URLArtifact'] = IoArgoprojEventsV1alpha1URLArtifact - - -class IoArgoprojEventsV1alpha1ArtifactLocation(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'configmap': (ConfigMapKeySelector,), # noqa: E501 - 'file': (IoArgoprojEventsV1alpha1FileArtifact,), # noqa: E501 - 'git': (IoArgoprojEventsV1alpha1GitArtifact,), # noqa: E501 - 'inline': (str,), # noqa: E501 - 'resource': (IoArgoprojEventsV1alpha1Resource,), # noqa: E501 - 's3': (IoArgoprojEventsV1alpha1S3Artifact,), # noqa: E501 - 'url': (IoArgoprojEventsV1alpha1URLArtifact,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'configmap': 'configmap', # noqa: E501 - 'file': 'file', # noqa: E501 - 'git': 'git', # noqa: E501 - 'inline': 'inline', # noqa: E501 - 'resource': 'resource', # noqa: E501 - 's3': 's3', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArtifactLocation - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - configmap (ConfigMapKeySelector): [optional] # noqa: E501 - file (IoArgoprojEventsV1alpha1FileArtifact): [optional] # noqa: E501 - git (IoArgoprojEventsV1alpha1GitArtifact): [optional] # noqa: E501 - inline (str): [optional] # noqa: E501 - resource (IoArgoprojEventsV1alpha1Resource): [optional] # noqa: E501 - s3 (IoArgoprojEventsV1alpha1S3Artifact): [optional] # noqa: E501 - url (IoArgoprojEventsV1alpha1URLArtifact): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArtifactLocation - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - configmap (ConfigMapKeySelector): [optional] # noqa: E501 - file (IoArgoprojEventsV1alpha1FileArtifact): [optional] # noqa: E501 - git (IoArgoprojEventsV1alpha1GitArtifact): [optional] # noqa: E501 - inline (str): [optional] # noqa: E501 - resource (IoArgoprojEventsV1alpha1Resource): [optional] # noqa: E501 - s3 (IoArgoprojEventsV1alpha1S3Artifact): [optional] # noqa: E501 - url (IoArgoprojEventsV1alpha1URLArtifact): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_aws_lambda_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_aws_lambda_trigger.py deleted file mode 100644 index 9c3805876c99..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_aws_lambda_trigger.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1AWSLambdaTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_key': (SecretKeySelector,), # noqa: E501 - 'function_name': (str,), # noqa: E501 - 'invocation_type': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'region': (str,), # noqa: E501 - 'role_arn': (str,), # noqa: E501 - 'secret_key': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_key': 'accessKey', # noqa: E501 - 'function_name': 'functionName', # noqa: E501 - 'invocation_type': 'invocationType', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'region': 'region', # noqa: E501 - 'role_arn': 'roleARN', # noqa: E501 - 'secret_key': 'secretKey', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AWSLambdaTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - function_name (str): FunctionName refers to the name of the function to invoke.. [optional] # noqa: E501 - invocation_type (str): Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. * DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. +optional. [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 - region (str): [optional] # noqa: E501 - role_arn (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AWSLambdaTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - function_name (str): FunctionName refers to the name of the function to invoke.. [optional] # noqa: E501 - invocation_type (str): Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. * DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. +optional. 
[optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - role_arn (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py deleted file mode 100644 index 31711cb45c67..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1AzureEventHubsTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'fqdn': (str,), # noqa: E501 - 'hub_name': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'shared_access_key': (SecretKeySelector,), # noqa: E501 - 'shared_access_key_name': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'fqdn': 'fqdn', # noqa: E501 - 'hub_name': 'hubName', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'shared_access_key': 'sharedAccessKey', # noqa: E501 - 'shared_access_key_name': 'sharedAccessKeyName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventHubsTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fqdn (str): [optional] # noqa: E501 - hub_name (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - shared_access_key (SecretKeySelector): [optional] # noqa: E501 - shared_access_key_name (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventHubsTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fqdn (str): [optional] # noqa: E501 - hub_name (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - shared_access_key (SecretKeySelector): [optional] # noqa: E501 - shared_access_key_name (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py deleted file mode 100644 index a4c54b585749..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1AzureEventsHubEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'fqdn': (str,), # noqa: E501 - 'hub_name': (str,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'shared_access_key': (SecretKeySelector,), # noqa: E501 - 'shared_access_key_name': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'filter': 'filter', # noqa: E501 - 'fqdn': 'fqdn', # noqa: E501 - 'hub_name': 'hubName', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'shared_access_key': 'sharedAccessKey', # noqa: E501 - 'shared_access_key_name': 'sharedAccessKeyName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventsHubEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - fqdn (str): [optional] # noqa: E501 - hub_name (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - shared_access_key (SecretKeySelector): [optional] # noqa: E501 - shared_access_key_name (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventsHubEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - fqdn (str): [optional] # noqa: E501 - hub_name (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - shared_access_key (SecretKeySelector): [optional] # noqa: E501 - shared_access_key_name (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py deleted file mode 100644 index ee10fbdf3c58..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1AzureQueueStorageEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'connection_string': (SecretKeySelector,), # noqa: E501 - 'decode_message': (bool,), # noqa: E501 - 'dlq': (bool,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'queue_name': (str,), # noqa: E501 - 'storage_account_name': (str,), # noqa: E501 - 'wait_time_in_seconds': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'connection_string': 'connectionString', # noqa: E501 - 'decode_message': 'decodeMessage', # noqa: E501 - 'dlq': 'dlq', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'queue_name': 'queueName', # noqa: E501 - 'storage_account_name': 'storageAccountName', # noqa: E501 - 'wait_time_in_seconds': 'waitTimeInSeconds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - connection_string (SecretKeySelector): [optional] # noqa: E501 - decode_message (bool): [optional] # noqa: E501 - dlq (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue_name (str): [optional] # noqa: E501 - storage_account_name (str): [optional] # noqa: E501 - wait_time_in_seconds (int): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - connection_string (SecretKeySelector): [optional] # noqa: E501 - decode_message (bool): [optional] # noqa: E501 - dlq (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue_name (str): [optional] # noqa: E501 - storage_account_name (str): [optional] # noqa: E501 - wait_time_in_seconds (int): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py deleted file mode 100644 index ca4a16b9b92c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py +++ /dev/null @@ -1,297 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1AzureServiceBusEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'connection_string': (SecretKeySelector,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'fully_qualified_namespace': (str,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'queue_name': (str,), # noqa: E501 - 'subscription_name': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'topic_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'connection_string': 'connectionString', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'fully_qualified_namespace': 'fullyQualifiedNamespace', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'queue_name': 'queueName', # noqa: E501 - 'subscription_name': 'subscriptionName', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'topic_name': 'topicName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - 
@convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - connection_string (SecretKeySelector): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - fully_qualified_namespace (str): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue_name (str): [optional] # noqa: E501 - subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - connection_string (SecretKeySelector): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - fully_qualified_namespace (str): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue_name (str): [optional] # noqa: E501 - subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py deleted file mode 100644 index 19b67af614dd..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py +++ /dev/null @@ -1,289 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1AzureServiceBusTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'connection_string': (SecretKeySelector,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'queue_name': (str,), # noqa: E501 - 'subscription_name': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'topic_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'connection_string': 'connectionString', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'queue_name': 'queueName', # noqa: E501 - 'subscription_name': 'subscriptionName', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'topic_name': 'topicName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - connection_string (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - queue_name (str): [optional] # noqa: E501 - subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - connection_string (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - queue_name (str): [optional] # noqa: E501 - subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_backoff.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_backoff.py deleted file mode 100644 index 347afcc7221d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_backoff.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount - from argo_workflows.model.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString - globals()['IoArgoprojEventsV1alpha1Amount'] = IoArgoprojEventsV1alpha1Amount - globals()['IoArgoprojEventsV1alpha1Int64OrString'] = IoArgoprojEventsV1alpha1Int64OrString - - -class IoArgoprojEventsV1alpha1Backoff(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'duration': (IoArgoprojEventsV1alpha1Int64OrString,), # noqa: E501 - 'factor': (IoArgoprojEventsV1alpha1Amount,), # noqa: E501 - 'jitter': (IoArgoprojEventsV1alpha1Amount,), # noqa: E501 - 'steps': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'duration': 'duration', # noqa: E501 - 'factor': 'factor', # noqa: E501 - 'jitter': 'jitter', # noqa: E501 - 'steps': 'steps', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Backoff - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (IoArgoprojEventsV1alpha1Int64OrString): [optional] # noqa: E501 - factor (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 - jitter (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 - steps (int): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Backoff - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (IoArgoprojEventsV1alpha1Int64OrString): [optional] # noqa: E501 - factor (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 - jitter (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 - steps (int): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_basic_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_basic_auth.py deleted file mode 100644 index f92b06a3619e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_basic_auth.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1BasicAuth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'password': (SecretKeySelector,), # noqa: E501 - 'username': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'password': 'password', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BasicAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BasicAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_auth.py deleted file mode 100644 index 04f5b1e53f80..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_auth.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1BitbucketBasicAuth'] = IoArgoprojEventsV1alpha1BitbucketBasicAuth - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1BitbucketAuth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'basic': (IoArgoprojEventsV1alpha1BitbucketBasicAuth,), # noqa: E501 - 'oauth_token': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'basic': 'basic', # noqa: E501 - 'oauth_token': 'oauthToken', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic (IoArgoprojEventsV1alpha1BitbucketBasicAuth): [optional] # noqa: E501 - oauth_token (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic (IoArgoprojEventsV1alpha1BitbucketBasicAuth): [optional] # noqa: E501 - oauth_token (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py deleted file mode 100644 index 916f7366d9c5..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1BitbucketBasicAuth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'password': (SecretKeySelector,), # noqa: E501 - 'username': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'password': 'password', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketBasicAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketBasicAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_event_source.py deleted file mode 100644 index 3939065eec16..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_event_source.py +++ /dev/null @@ -1,303 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - globals()['IoArgoprojEventsV1alpha1BitbucketAuth'] = IoArgoprojEventsV1alpha1BitbucketAuth - globals()['IoArgoprojEventsV1alpha1BitbucketRepository'] = IoArgoprojEventsV1alpha1BitbucketRepository - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = 
IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - - -class IoArgoprojEventsV1alpha1BitbucketEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth': (IoArgoprojEventsV1alpha1BitbucketAuth,), # noqa: E501 - 'delete_hook_on_finish': (bool,), # noqa: E501 - 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'owner': (str,), # noqa: E501 - 'project_key': (str,), # noqa: E501 - 'repositories': ([IoArgoprojEventsV1alpha1BitbucketRepository],), # noqa: E501 - 'repository_slug': (str,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'delete_hook_on_finish': 'deleteHookOnFinish', # noqa: E501 - 'events': 'events', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'owner': 'owner', # noqa: E501 - 'project_key': 'projectKey', # noqa: E501 - 'repositories': 'repositories', # noqa: E501 - 'repository_slug': 'repositorySlug', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BitbucketAuth): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): Events this webhook is subscribed to.. [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - owner (str): [optional] # noqa: E501 - project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketRepository]): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BitbucketAuth): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): Events this webhook is subscribed to.. [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - owner (str): [optional] # noqa: E501 - project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketRepository]): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_repository.py deleted file mode 100644 index 9a41746db1a8..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_repository.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1BitbucketRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'owner': (str,), # noqa: E501 - 'repository_slug': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'owner': 'owner', # noqa: E501 - 'repository_slug': 'repositorySlug', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - owner (str): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - owner (str): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py deleted file mode 100644 index f036bb0e8806..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py +++ /dev/null @@ -1,313 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1BitbucketServerRepository'] = IoArgoprojEventsV1alpha1BitbucketServerRepository - 
globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1BitbucketServerEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'access_token': (SecretKeySelector,), # noqa: E501 - 'bitbucketserver_base_url': (str,), # noqa: E501 - 'delete_hook_on_finish': (bool,), # noqa: E501 - 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'project_key': (str,), # noqa: E501 - 'repositories': ([IoArgoprojEventsV1alpha1BitbucketServerRepository],), # noqa: E501 - 'repository_slug': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - 'webhook_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_token': 'accessToken', # noqa: E501 - 'bitbucketserver_base_url': 'bitbucketserverBaseURL', # noqa: E501 - 'delete_hook_on_finish': 'deleteHookOnFinish', # noqa: E501 - 'events': 'events', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'project_key': 'projectKey', # noqa: E501 - 'repositories': 'repositories', # noqa: E501 - 'repository_slug': 'repositorySlug', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - 'webhook_secret': 'webhookSecret', # noqa: E501 - } - - read_only_vars = { - } - 
- _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_token (SecretKeySelector): [optional] # noqa: E501 - bitbucketserver_base_url (str): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketServerRepository]): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - webhook_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_token (SecretKeySelector): [optional] # noqa: E501 - bitbucketserver_base_url (str): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketServerRepository]): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - webhook_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_repository.py deleted file mode 100644 index a913f26341d1..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_repository.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1BitbucketServerRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'project_key': (str,), # noqa: E501 - 'repository_slug': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'project_key': 'projectKey', # noqa: E501 - 'repository_slug': 'repositorySlug', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - project_key (str): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - project_key (str): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_calendar_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_calendar_event_source.py deleted file mode 100644 index d62e991558ec..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_calendar_event_source.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1EventPersistence'] = IoArgoprojEventsV1alpha1EventPersistence - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - - -class IoArgoprojEventsV1alpha1CalendarEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'exclusion_dates': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'interval': (str,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'persistence': (IoArgoprojEventsV1alpha1EventPersistence,), # noqa: E501 - 'schedule': (str,), # noqa: E501 - 'timezone': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'exclusion_dates': 'exclusionDates', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'interval': 'interval', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'persistence': 'persistence', # noqa: E501 - 'schedule': 'schedule', # noqa: E501 - 'timezone': 'timezone', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CalendarEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - exclusion_dates ([str]): ExclusionDates defines the list of DATE-TIME exceptions for recurring events.. [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - interval (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - persistence (IoArgoprojEventsV1alpha1EventPersistence): [optional] # noqa: E501 - schedule (str): [optional] # noqa: E501 - timezone (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CalendarEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - exclusion_dates ([str]): ExclusionDates defines the list of DATE-TIME exceptions for recurring events.. [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - interval (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - persistence (IoArgoprojEventsV1alpha1EventPersistence): [optional] # noqa: E501 - schedule (str): [optional] # noqa: E501 - timezone (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py deleted file mode 100644 index b2aa2cb7bb84..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1CatchupConfiguration(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'enabled': (bool,), # noqa: E501 - 'max_duration': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'enabled': 'enabled', # noqa: E501 - 'max_duration': 'maxDuration', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - enabled (bool): [optional] # noqa: E501 - max_duration (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - enabled (bool): [optional] # noqa: E501 - max_duration (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_condition.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_condition.py deleted file mode 100644 index 07df4c4bd186..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_condition.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1Condition(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'last_transition_time': (datetime,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'reason': (str,), # noqa: E501 - 'status': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'last_transition_time': 'lastTransitionTime', # noqa: E501 - 'message': 'message', # noqa: E501 - 'reason': 'reason', # noqa: E501 - 'status': 'status', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Condition - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - last_transition_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - message (str): [optional] # noqa: E501 - reason (str): [optional] # noqa: E501 - status (str): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Condition - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - last_transition_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - message (str): [optional] # noqa: E501 - reason (str): [optional] # noqa: E501 - status (str): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_by_time.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_by_time.py deleted file mode 100644 index 1e0270d7cc4f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_by_time.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1ConditionsResetByTime(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'cron': (str,), # noqa: E501 - 'timezone': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cron': 'cron', # noqa: E501 - 'timezone': 'timezone', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetByTime - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron (str): [optional] # noqa: E501 - timezone (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetByTime - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron (str): [optional] # noqa: E501 - timezone (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_criteria.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_criteria.py deleted file mode 100644 index 0bf7d9e76916..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_criteria.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime - globals()['IoArgoprojEventsV1alpha1ConditionsResetByTime'] = IoArgoprojEventsV1alpha1ConditionsResetByTime - - -class IoArgoprojEventsV1alpha1ConditionsResetCriteria(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'by_time': (IoArgoprojEventsV1alpha1ConditionsResetByTime,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'by_time': 'byTime', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetCriteria - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - by_time (IoArgoprojEventsV1alpha1ConditionsResetByTime): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetCriteria - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - by_time (IoArgoprojEventsV1alpha1ConditionsResetByTime): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_config_map_persistence.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_config_map_persistence.py deleted file mode 100644 index a394297c566d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_config_map_persistence.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1ConfigMapPersistence(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'create_if_not_exist': (bool,), # noqa: E501 - 'name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_if_not_exist': 'createIfNotExist', # noqa: E501 - 'name': 'name', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConfigMapPersistence - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_if_not_exist (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConfigMapPersistence - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_if_not_exist (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_custom_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_custom_trigger.py deleted file mode 100644 index ee11737c1aae..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_custom_trigger.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1CustomTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'cert_secret': (SecretKeySelector,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'secure': (bool,), # noqa: E501 - 'server_name_override': (str,), # noqa: E501 - 'server_url': (str,), # noqa: E501 - 'spec': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cert_secret': 'certSecret', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'secure': 'secure', # noqa: E501 - 'server_name_override': 'serverNameOverride', # noqa: E501 - 'server_url': 'serverURL', # noqa: E501 - 'spec': 'spec', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CustomTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cert_secret (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved custom trigger trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - secure (bool): [optional] # noqa: E501 - server_name_override (str): ServerNameOverride for the secure connection between sensor and custom trigger gRPC server.. [optional] # noqa: E501 - server_url (str): [optional] # noqa: E501 - spec ({str: (str,)}): Spec is the custom trigger resource specification that custom trigger gRPC server knows how to interpret.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CustomTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cert_secret (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved custom trigger trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - secure (bool): [optional] # noqa: E501 - server_name_override (str): ServerNameOverride for the secure connection between sensor and custom trigger gRPC server.. [optional] # noqa: E501 - server_url (str): [optional] # noqa: E501 - spec ({str: (str,)}): Spec is the custom trigger resource specification that custom trigger gRPC server knows how to interpret.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_data_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_data_filter.py deleted file mode 100644 index 551820efed73..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_data_filter.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1DataFilter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'comparator': (str,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'template': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - 'value': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'comparator': 'comparator', # noqa: E501 - 'path': 'path', # noqa: E501 - 'template': 'template', # noqa: E501 - 'type': 'type', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1DataFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - comparator (str): Comparator compares the event data with a user given value. Can be \">=\", \">\", \"=\", \"!=\", \"<\", or \"<=\". Is optional, and if left blank treated as equality \"=\".. [optional] # noqa: E501 - path (str): Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - template (str): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - value ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1DataFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - comparator (str): Comparator compares the event data with a user given value. Can be \">=\", \">\", \"=\", \"!=\", \"<\", or \"<=\". Is optional, and if left blank treated as equality \"=\".. [optional] # noqa: E501 - path (str): Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - template (str): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - value ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_email_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_email_trigger.py deleted file mode 100644 index 9c14217b56a2..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_email_trigger.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1EmailTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'body': (str,), # noqa: E501 - '_from': (str,), # noqa: E501 - 'host': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'port': (int,), # noqa: E501 - 'smtp_password': (SecretKeySelector,), # noqa: E501 - 'subject': (str,), # noqa: E501 - 'to': ([str],), # noqa: E501 - 'username': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'body': 'body', # noqa: E501 - '_from': 'from', # noqa: E501 - 'host': 'host', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'port': 'port', # noqa: E501 - 'smtp_password': 'smtpPassword', # noqa: E501 - 'subject': 'subject', # noqa: E501 - 'to': 'to', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmailTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - body (str): [optional] # noqa: E501 - _from (str): [optional] # noqa: E501 - host (str): Host refers to the smtp host url to which email is send.. 
[optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - port (int): [optional] # noqa: E501 - smtp_password (SecretKeySelector): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - to ([str]): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmailTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - body (str): [optional] # noqa: E501 - _from (str): [optional] # noqa: E501 - host (str): Host refers to the smtp host url to which email is send.. 
[optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - port (int): [optional] # noqa: E501 - smtp_password (SecretKeySelector): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - to ([str]): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_emitter_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_emitter_event_source.py deleted file mode 100644 index c51403b60782..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_emitter_event_source.py +++ /dev/null @@ -1,303 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class 
IoArgoprojEventsV1alpha1EmitterEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'broker': (str,), # noqa: E501 - 'channel_key': (str,), # noqa: E501 - 'channel_name': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'password': (SecretKeySelector,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'username': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'broker': 'broker', # noqa: E501 - 'channel_key': 'channelKey', # noqa: E501 - 'channel_name': 'channelName', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'password': 'password', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmitterEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - broker (str): Broker URI to connect to.. [optional] # noqa: E501 - channel_key (str): [optional] # noqa: E501 - channel_name (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmitterEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - broker (str): Broker URI to connect to.. [optional] # noqa: E501 - channel_key (str): [optional] # noqa: E501 - channel_name (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_context.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_context.py deleted file mode 100644 index 407be77edd35..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_context.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1EventContext(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'datacontenttype': (str,), # noqa: E501 - 'id': (str,), # noqa: E501 - 'source': (str,), # noqa: E501 - 'specversion': (str,), # noqa: E501 - 'subject': (str,), # noqa: E501 - 'time': (datetime,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'datacontenttype': 'datacontenttype', # noqa: E501 - 'id': 'id', # noqa: E501 - 'source': 'source', # noqa: E501 - 'specversion': 'specversion', # noqa: E501 - 'subject': 'subject', # noqa: E501 - 'time': 'time', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - datacontenttype (str): DataContentType - A MIME (RFC2046) string describing the media type of `data`.. [optional] # noqa: E501 - id (str): ID of the event; must be non-empty and unique within the scope of the producer.. [optional] # noqa: E501 - source (str): Source - A URI describing the event producer.. [optional] # noqa: E501 - specversion (str): SpecVersion - The version of the CloudEvents specification used by the io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - type (str): Type - The type of the occurrence which has happened.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - datacontenttype (str): DataContentType - A MIME (RFC2046) string describing the media type of `data`.. [optional] # noqa: E501 - id (str): ID of the event; must be non-empty and unique within the scope of the producer.. [optional] # noqa: E501 - source (str): Source - A URI describing the event producer.. [optional] # noqa: E501 - specversion (str): SpecVersion - The version of the CloudEvents specification used by the io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - type (str): Type - The type of the occurrence which has happened.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency.py deleted file mode 100644 index f75cdcd89a61..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_filter import IoArgoprojEventsV1alpha1EventDependencyFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer - globals()['IoArgoprojEventsV1alpha1EventDependencyFilter'] = IoArgoprojEventsV1alpha1EventDependencyFilter - globals()['IoArgoprojEventsV1alpha1EventDependencyTransformer'] = IoArgoprojEventsV1alpha1EventDependencyTransformer - - -class IoArgoprojEventsV1alpha1EventDependency(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'event_name': (str,), # noqa: E501 - 'event_source_name': (str,), # noqa: E501 - 'filters': (IoArgoprojEventsV1alpha1EventDependencyFilter,), # noqa: E501 - 'filters_logical_operator': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'transform': (IoArgoprojEventsV1alpha1EventDependencyTransformer,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'event_name': 'eventName', # noqa: E501 - 'event_source_name': 'eventSourceName', # noqa: E501 - 'filters': 'filters', # noqa: E501 - 'filters_logical_operator': 'filtersLogicalOperator', # noqa: E501 - 'name': 'name', # noqa: E501 - 'transform': 'transform', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependency - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type 
checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_name (str): [optional] # noqa: E501 - event_source_name (str): [optional] # noqa: E501 - filters (IoArgoprojEventsV1alpha1EventDependencyFilter): [optional] # noqa: E501 - filters_logical_operator (str): FiltersLogicalOperator defines how different filters are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. 
[optional] # noqa: E501 - name (str): [optional] # noqa: E501 - transform (IoArgoprojEventsV1alpha1EventDependencyTransformer): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependency - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_name (str): [optional] # noqa: E501 - event_source_name (str): [optional] # noqa: E501 - filters (IoArgoprojEventsV1alpha1EventDependencyFilter): [optional] # noqa: E501 - filters_logical_operator (str): FiltersLogicalOperator defines how different filters are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. 
[optional] # noqa: E501 - name (str): [optional] # noqa: E501 - transform (IoArgoprojEventsV1alpha1EventDependencyTransformer): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_filter.py deleted file mode 100644 index 335a7f9b9a9a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_filter.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_data_filter import IoArgoprojEventsV1alpha1DataFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_event_context import IoArgoprojEventsV1alpha1EventContext - from argo_workflows.model.io_argoproj_events_v1alpha1_expr_filter import IoArgoprojEventsV1alpha1ExprFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_time_filter import IoArgoprojEventsV1alpha1TimeFilter - globals()['IoArgoprojEventsV1alpha1DataFilter'] = IoArgoprojEventsV1alpha1DataFilter - globals()['IoArgoprojEventsV1alpha1EventContext'] = IoArgoprojEventsV1alpha1EventContext - globals()['IoArgoprojEventsV1alpha1ExprFilter'] = IoArgoprojEventsV1alpha1ExprFilter - globals()['IoArgoprojEventsV1alpha1TimeFilter'] = IoArgoprojEventsV1alpha1TimeFilter - - -class IoArgoprojEventsV1alpha1EventDependencyFilter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'context': (IoArgoprojEventsV1alpha1EventContext,), # noqa: E501 - 'data': ([IoArgoprojEventsV1alpha1DataFilter],), # noqa: E501 - 'data_logical_operator': (str,), # noqa: E501 - 'expr_logical_operator': (str,), # noqa: E501 - 'exprs': ([IoArgoprojEventsV1alpha1ExprFilter],), # noqa: E501 - 'script': (str,), # noqa: E501 - 'time': (IoArgoprojEventsV1alpha1TimeFilter,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'context': 'context', # noqa: E501 - 'data': 'data', # noqa: E501 - 'data_logical_operator': 'dataLogicalOperator', # noqa: E501 - 'expr_logical_operator': 'exprLogicalOperator', # noqa: E501 - 'exprs': 'exprs', # noqa: E501 - 'script': 'script', # noqa: E501 - 'time': 'time', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - context (IoArgoprojEventsV1alpha1EventContext): [optional] # noqa: E501 - data ([IoArgoprojEventsV1alpha1DataFilter]): [optional] # noqa: E501 - data_logical_operator (str): DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 - expr_logical_operator (str): ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 - exprs ([IoArgoprojEventsV1alpha1ExprFilter]): Exprs contains the list of expressions evaluated against the event payload.. [optional] # noqa: E501 - script (str): Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - time (IoArgoprojEventsV1alpha1TimeFilter): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - context (IoArgoprojEventsV1alpha1EventContext): [optional] # noqa: E501 - data ([IoArgoprojEventsV1alpha1DataFilter]): [optional] # noqa: E501 - data_logical_operator (str): DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 - expr_logical_operator (str): ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 - exprs ([IoArgoprojEventsV1alpha1ExprFilter]): Exprs contains the list of expressions evaluated against the event payload.. [optional] # noqa: E501 - script (str): Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - time (IoArgoprojEventsV1alpha1TimeFilter): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_transformer.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_transformer.py deleted file mode 100644 index 0412a1b0f59b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_transformer.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1EventDependencyTransformer(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'jq': (str,), # noqa: E501 - 'script': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'jq': 'jq', # noqa: E501 - 'script': 'script', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyTransformer - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - jq (str): [optional] # noqa: E501 - script (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyTransformer - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - jq (str): [optional] # noqa: E501 - script (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_persistence.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_persistence.py deleted file mode 100644 index 7a750f023c47..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_persistence.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_catchup_configuration import IoArgoprojEventsV1alpha1CatchupConfiguration - from argo_workflows.model.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence - globals()['IoArgoprojEventsV1alpha1CatchupConfiguration'] = IoArgoprojEventsV1alpha1CatchupConfiguration - globals()['IoArgoprojEventsV1alpha1ConfigMapPersistence'] = IoArgoprojEventsV1alpha1ConfigMapPersistence - - -class IoArgoprojEventsV1alpha1EventPersistence(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'catchup': (IoArgoprojEventsV1alpha1CatchupConfiguration,), # noqa: E501 - 'config_map': (IoArgoprojEventsV1alpha1ConfigMapPersistence,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'catchup': 'catchup', # noqa: E501 - 'config_map': 'configMap', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventPersistence - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - catchup (IoArgoprojEventsV1alpha1CatchupConfiguration): [optional] # noqa: E501 - config_map (IoArgoprojEventsV1alpha1ConfigMapPersistence): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventPersistence - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - catchup (IoArgoprojEventsV1alpha1CatchupConfiguration): [optional] # noqa: E501 - config_map (IoArgoprojEventsV1alpha1ConfigMapPersistence): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source.py deleted file mode 100644 index e0ccea56f660..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojEventsV1alpha1EventSourceSpec'] = IoArgoprojEventsV1alpha1EventSourceSpec - globals()['IoArgoprojEventsV1alpha1EventSourceStatus'] = IoArgoprojEventsV1alpha1EventSourceStatus - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojEventsV1alpha1EventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojEventsV1alpha1EventSourceSpec,), # noqa: E501 - 'status': (IoArgoprojEventsV1alpha1EventSourceStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (IoArgoprojEventsV1alpha1EventSourceSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1EventSourceStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (IoArgoprojEventsV1alpha1EventSourceSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1EventSourceStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_filter.py deleted file mode 100644 index 3cd42535b945..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_filter.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1EventSourceFilter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'expression': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'expression': 'expression', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - expression (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - expression (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_list.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_list.py deleted file mode 100644 index 831798b34304..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_list.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource - from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource - globals()['ListMeta'] = ListMeta - - -class IoArgoprojEventsV1alpha1EventSourceList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'items': ([IoArgoprojEventsV1alpha1EventSource],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceList - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1EventSource]): [optional] # noqa: E501 - metadata (ListMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceList - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1EventSource]): [optional] # noqa: E501 - metadata (ListMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_spec.py deleted file mode 100644 index a36b07152fe1..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_spec.py +++ /dev/null @@ -1,461 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_events_hub_event_source import IoArgoprojEventsV1alpha1AzureEventsHubEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_event_source import IoArgoprojEventsV1alpha1BitbucketEventSource - from 
argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_emitter_event_source import IoArgoprojEventsV1alpha1EmitterEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_event_source import IoArgoprojEventsV1alpha1KafkaEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource - from argo_workflows.model.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource - 
from argo_workflows.model.io_argoproj_events_v1alpha1_redis_stream_event_source import IoArgoprojEventsV1alpha1RedisStreamEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact - from argo_workflows.model.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service - from argo_workflows.model.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource - globals()['IoArgoprojEventsV1alpha1AMQPEventSource'] = IoArgoprojEventsV1alpha1AMQPEventSource - globals()['IoArgoprojEventsV1alpha1AzureEventsHubEventSource'] = IoArgoprojEventsV1alpha1AzureEventsHubEventSource - globals()['IoArgoprojEventsV1alpha1AzureQueueStorageEventSource'] = IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - globals()['IoArgoprojEventsV1alpha1AzureServiceBusEventSource'] = IoArgoprojEventsV1alpha1AzureServiceBusEventSource - globals()['IoArgoprojEventsV1alpha1BitbucketEventSource'] = 
IoArgoprojEventsV1alpha1BitbucketEventSource - globals()['IoArgoprojEventsV1alpha1BitbucketServerEventSource'] = IoArgoprojEventsV1alpha1BitbucketServerEventSource - globals()['IoArgoprojEventsV1alpha1CalendarEventSource'] = IoArgoprojEventsV1alpha1CalendarEventSource - globals()['IoArgoprojEventsV1alpha1EmitterEventSource'] = IoArgoprojEventsV1alpha1EmitterEventSource - globals()['IoArgoprojEventsV1alpha1FileEventSource'] = IoArgoprojEventsV1alpha1FileEventSource - globals()['IoArgoprojEventsV1alpha1GenericEventSource'] = IoArgoprojEventsV1alpha1GenericEventSource - globals()['IoArgoprojEventsV1alpha1GerritEventSource'] = IoArgoprojEventsV1alpha1GerritEventSource - globals()['IoArgoprojEventsV1alpha1GithubEventSource'] = IoArgoprojEventsV1alpha1GithubEventSource - globals()['IoArgoprojEventsV1alpha1GitlabEventSource'] = IoArgoprojEventsV1alpha1GitlabEventSource - globals()['IoArgoprojEventsV1alpha1HDFSEventSource'] = IoArgoprojEventsV1alpha1HDFSEventSource - globals()['IoArgoprojEventsV1alpha1KafkaEventSource'] = IoArgoprojEventsV1alpha1KafkaEventSource - globals()['IoArgoprojEventsV1alpha1MQTTEventSource'] = IoArgoprojEventsV1alpha1MQTTEventSource - globals()['IoArgoprojEventsV1alpha1NATSEventsSource'] = IoArgoprojEventsV1alpha1NATSEventsSource - globals()['IoArgoprojEventsV1alpha1NSQEventSource'] = IoArgoprojEventsV1alpha1NSQEventSource - globals()['IoArgoprojEventsV1alpha1PubSubEventSource'] = IoArgoprojEventsV1alpha1PubSubEventSource - globals()['IoArgoprojEventsV1alpha1PulsarEventSource'] = IoArgoprojEventsV1alpha1PulsarEventSource - globals()['IoArgoprojEventsV1alpha1RedisEventSource'] = IoArgoprojEventsV1alpha1RedisEventSource - globals()['IoArgoprojEventsV1alpha1RedisStreamEventSource'] = IoArgoprojEventsV1alpha1RedisStreamEventSource - globals()['IoArgoprojEventsV1alpha1ResourceEventSource'] = IoArgoprojEventsV1alpha1ResourceEventSource - globals()['IoArgoprojEventsV1alpha1S3Artifact'] = IoArgoprojEventsV1alpha1S3Artifact - 
globals()['IoArgoprojEventsV1alpha1SFTPEventSource'] = IoArgoprojEventsV1alpha1SFTPEventSource - globals()['IoArgoprojEventsV1alpha1SNSEventSource'] = IoArgoprojEventsV1alpha1SNSEventSource - globals()['IoArgoprojEventsV1alpha1SQSEventSource'] = IoArgoprojEventsV1alpha1SQSEventSource - globals()['IoArgoprojEventsV1alpha1Service'] = IoArgoprojEventsV1alpha1Service - globals()['IoArgoprojEventsV1alpha1SlackEventSource'] = IoArgoprojEventsV1alpha1SlackEventSource - globals()['IoArgoprojEventsV1alpha1StorageGridEventSource'] = IoArgoprojEventsV1alpha1StorageGridEventSource - globals()['IoArgoprojEventsV1alpha1StripeEventSource'] = IoArgoprojEventsV1alpha1StripeEventSource - globals()['IoArgoprojEventsV1alpha1Template'] = IoArgoprojEventsV1alpha1Template - globals()['IoArgoprojEventsV1alpha1WebhookEventSource'] = IoArgoprojEventsV1alpha1WebhookEventSource - - -class IoArgoprojEventsV1alpha1EventSourceSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'amqp': ({str: (IoArgoprojEventsV1alpha1AMQPEventSource,)},), # noqa: E501 - 'azure_events_hub': ({str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)},), # noqa: E501 - 'azure_queue_storage': ({str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)},), # noqa: E501 - 'azure_service_bus': ({str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)},), # noqa: E501 - 'bitbucket': ({str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)},), # noqa: E501 - 'bitbucketserver': ({str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)},), # noqa: E501 - 'calendar': ({str: (IoArgoprojEventsV1alpha1CalendarEventSource,)},), # noqa: E501 - 'emitter': ({str: (IoArgoprojEventsV1alpha1EmitterEventSource,)},), # noqa: E501 - 'event_bus_name': (str,), # noqa: E501 - 'file': ({str: (IoArgoprojEventsV1alpha1FileEventSource,)},), # noqa: E501 - 'generic': ({str: (IoArgoprojEventsV1alpha1GenericEventSource,)},), # noqa: E501 - 'gerrit': ({str: (IoArgoprojEventsV1alpha1GerritEventSource,)},), # noqa: E501 - 'github': ({str: (IoArgoprojEventsV1alpha1GithubEventSource,)},), # noqa: E501 - 'gitlab': ({str: (IoArgoprojEventsV1alpha1GitlabEventSource,)},), # noqa: E501 - 'hdfs': ({str: (IoArgoprojEventsV1alpha1HDFSEventSource,)},), # noqa: E501 - 'kafka': ({str: 
(IoArgoprojEventsV1alpha1KafkaEventSource,)},), # noqa: E501 - 'minio': ({str: (IoArgoprojEventsV1alpha1S3Artifact,)},), # noqa: E501 - 'mqtt': ({str: (IoArgoprojEventsV1alpha1MQTTEventSource,)},), # noqa: E501 - 'nats': ({str: (IoArgoprojEventsV1alpha1NATSEventsSource,)},), # noqa: E501 - 'nsq': ({str: (IoArgoprojEventsV1alpha1NSQEventSource,)},), # noqa: E501 - 'pub_sub': ({str: (IoArgoprojEventsV1alpha1PubSubEventSource,)},), # noqa: E501 - 'pulsar': ({str: (IoArgoprojEventsV1alpha1PulsarEventSource,)},), # noqa: E501 - 'redis': ({str: (IoArgoprojEventsV1alpha1RedisEventSource,)},), # noqa: E501 - 'redis_stream': ({str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)},), # noqa: E501 - 'replicas': (int,), # noqa: E501 - 'resource': ({str: (IoArgoprojEventsV1alpha1ResourceEventSource,)},), # noqa: E501 - 'service': (IoArgoprojEventsV1alpha1Service,), # noqa: E501 - 'sftp': ({str: (IoArgoprojEventsV1alpha1SFTPEventSource,)},), # noqa: E501 - 'slack': ({str: (IoArgoprojEventsV1alpha1SlackEventSource,)},), # noqa: E501 - 'sns': ({str: (IoArgoprojEventsV1alpha1SNSEventSource,)},), # noqa: E501 - 'sqs': ({str: (IoArgoprojEventsV1alpha1SQSEventSource,)},), # noqa: E501 - 'storage_grid': ({str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)},), # noqa: E501 - 'stripe': ({str: (IoArgoprojEventsV1alpha1StripeEventSource,)},), # noqa: E501 - 'template': (IoArgoprojEventsV1alpha1Template,), # noqa: E501 - 'webhook': ({str: (IoArgoprojEventsV1alpha1WebhookEventSource,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'amqp': 'amqp', # noqa: E501 - 'azure_events_hub': 'azureEventsHub', # noqa: E501 - 'azure_queue_storage': 'azureQueueStorage', # noqa: E501 - 'azure_service_bus': 'azureServiceBus', # noqa: E501 - 'bitbucket': 'bitbucket', # noqa: E501 - 'bitbucketserver': 'bitbucketserver', # noqa: E501 - 'calendar': 'calendar', # noqa: E501 - 'emitter': 'emitter', # noqa: E501 - 'event_bus_name': 'eventBusName', # 
noqa: E501 - 'file': 'file', # noqa: E501 - 'generic': 'generic', # noqa: E501 - 'gerrit': 'gerrit', # noqa: E501 - 'github': 'github', # noqa: E501 - 'gitlab': 'gitlab', # noqa: E501 - 'hdfs': 'hdfs', # noqa: E501 - 'kafka': 'kafka', # noqa: E501 - 'minio': 'minio', # noqa: E501 - 'mqtt': 'mqtt', # noqa: E501 - 'nats': 'nats', # noqa: E501 - 'nsq': 'nsq', # noqa: E501 - 'pub_sub': 'pubSub', # noqa: E501 - 'pulsar': 'pulsar', # noqa: E501 - 'redis': 'redis', # noqa: E501 - 'redis_stream': 'redisStream', # noqa: E501 - 'replicas': 'replicas', # noqa: E501 - 'resource': 'resource', # noqa: E501 - 'service': 'service', # noqa: E501 - 'sftp': 'sftp', # noqa: E501 - 'slack': 'slack', # noqa: E501 - 'sns': 'sns', # noqa: E501 - 'sqs': 'sqs', # noqa: E501 - 'storage_grid': 'storageGrid', # noqa: E501 - 'stripe': 'stripe', # noqa: E501 - 'template': 'template', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - amqp ({str: (IoArgoprojEventsV1alpha1AMQPEventSource,)}): [optional] # noqa: E501 - azure_events_hub ({str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)}): [optional] # noqa: E501 - azure_queue_storage ({str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)}): [optional] # noqa: E501 - azure_service_bus ({str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)}): [optional] # noqa: E501 - bitbucket ({str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)}): [optional] # noqa: E501 - bitbucketserver ({str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)}): [optional] # noqa: E501 - calendar ({str: (IoArgoprojEventsV1alpha1CalendarEventSource,)}): [optional] # noqa: E501 - emitter ({str: (IoArgoprojEventsV1alpha1EmitterEventSource,)}): [optional] # noqa: E501 - event_bus_name (str): [optional] # noqa: E501 - file ({str: (IoArgoprojEventsV1alpha1FileEventSource,)}): [optional] # noqa: E501 - generic ({str: (IoArgoprojEventsV1alpha1GenericEventSource,)}): [optional] # noqa: E501 - gerrit ({str: (IoArgoprojEventsV1alpha1GerritEventSource,)}): [optional] # noqa: E501 - github ({str: (IoArgoprojEventsV1alpha1GithubEventSource,)}): [optional] # noqa: E501 - gitlab ({str: (IoArgoprojEventsV1alpha1GitlabEventSource,)}): [optional] # noqa: E501 - hdfs ({str: 
(IoArgoprojEventsV1alpha1HDFSEventSource,)}): [optional] # noqa: E501 - kafka ({str: (IoArgoprojEventsV1alpha1KafkaEventSource,)}): [optional] # noqa: E501 - minio ({str: (IoArgoprojEventsV1alpha1S3Artifact,)}): [optional] # noqa: E501 - mqtt ({str: (IoArgoprojEventsV1alpha1MQTTEventSource,)}): [optional] # noqa: E501 - nats ({str: (IoArgoprojEventsV1alpha1NATSEventsSource,)}): [optional] # noqa: E501 - nsq ({str: (IoArgoprojEventsV1alpha1NSQEventSource,)}): [optional] # noqa: E501 - pub_sub ({str: (IoArgoprojEventsV1alpha1PubSubEventSource,)}): [optional] # noqa: E501 - pulsar ({str: (IoArgoprojEventsV1alpha1PulsarEventSource,)}): [optional] # noqa: E501 - redis ({str: (IoArgoprojEventsV1alpha1RedisEventSource,)}): [optional] # noqa: E501 - redis_stream ({str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - resource ({str: (IoArgoprojEventsV1alpha1ResourceEventSource,)}): [optional] # noqa: E501 - service (IoArgoprojEventsV1alpha1Service): [optional] # noqa: E501 - sftp ({str: (IoArgoprojEventsV1alpha1SFTPEventSource,)}): [optional] # noqa: E501 - slack ({str: (IoArgoprojEventsV1alpha1SlackEventSource,)}): [optional] # noqa: E501 - sns ({str: (IoArgoprojEventsV1alpha1SNSEventSource,)}): [optional] # noqa: E501 - sqs ({str: (IoArgoprojEventsV1alpha1SQSEventSource,)}): [optional] # noqa: E501 - storage_grid ({str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)}): [optional] # noqa: E501 - stripe ({str: (IoArgoprojEventsV1alpha1StripeEventSource,)}): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - webhook ({str: (IoArgoprojEventsV1alpha1WebhookEventSource,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = 
kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - amqp ({str: (IoArgoprojEventsV1alpha1AMQPEventSource,)}): [optional] # noqa: E501 - azure_events_hub ({str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)}): [optional] # noqa: E501 - azure_queue_storage ({str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)}): [optional] # noqa: E501 - azure_service_bus ({str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)}): [optional] # noqa: E501 - bitbucket ({str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)}): [optional] # noqa: E501 - bitbucketserver ({str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)}): [optional] # noqa: E501 - calendar ({str: (IoArgoprojEventsV1alpha1CalendarEventSource,)}): [optional] # noqa: E501 - emitter ({str: (IoArgoprojEventsV1alpha1EmitterEventSource,)}): [optional] # noqa: E501 - event_bus_name (str): [optional] # noqa: E501 - file ({str: (IoArgoprojEventsV1alpha1FileEventSource,)}): [optional] # noqa: E501 - generic ({str: (IoArgoprojEventsV1alpha1GenericEventSource,)}): [optional] # noqa: E501 - gerrit ({str: (IoArgoprojEventsV1alpha1GerritEventSource,)}): [optional] # noqa: E501 - github ({str: (IoArgoprojEventsV1alpha1GithubEventSource,)}): [optional] # noqa: E501 - gitlab ({str: (IoArgoprojEventsV1alpha1GitlabEventSource,)}): [optional] # noqa: E501 - hdfs ({str: 
(IoArgoprojEventsV1alpha1HDFSEventSource,)}): [optional] # noqa: E501 - kafka ({str: (IoArgoprojEventsV1alpha1KafkaEventSource,)}): [optional] # noqa: E501 - minio ({str: (IoArgoprojEventsV1alpha1S3Artifact,)}): [optional] # noqa: E501 - mqtt ({str: (IoArgoprojEventsV1alpha1MQTTEventSource,)}): [optional] # noqa: E501 - nats ({str: (IoArgoprojEventsV1alpha1NATSEventsSource,)}): [optional] # noqa: E501 - nsq ({str: (IoArgoprojEventsV1alpha1NSQEventSource,)}): [optional] # noqa: E501 - pub_sub ({str: (IoArgoprojEventsV1alpha1PubSubEventSource,)}): [optional] # noqa: E501 - pulsar ({str: (IoArgoprojEventsV1alpha1PulsarEventSource,)}): [optional] # noqa: E501 - redis ({str: (IoArgoprojEventsV1alpha1RedisEventSource,)}): [optional] # noqa: E501 - redis_stream ({str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - resource ({str: (IoArgoprojEventsV1alpha1ResourceEventSource,)}): [optional] # noqa: E501 - service (IoArgoprojEventsV1alpha1Service): [optional] # noqa: E501 - sftp ({str: (IoArgoprojEventsV1alpha1SFTPEventSource,)}): [optional] # noqa: E501 - slack ({str: (IoArgoprojEventsV1alpha1SlackEventSource,)}): [optional] # noqa: E501 - sns ({str: (IoArgoprojEventsV1alpha1SNSEventSource,)}): [optional] # noqa: E501 - sqs ({str: (IoArgoprojEventsV1alpha1SQSEventSource,)}): [optional] # noqa: E501 - storage_grid ({str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)}): [optional] # noqa: E501 - stripe ({str: (IoArgoprojEventsV1alpha1StripeEventSource,)}): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - webhook ({str: (IoArgoprojEventsV1alpha1WebhookEventSource,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = 
kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_status.py deleted file mode 100644 index f2f0e349af17..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_status.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status - globals()['IoArgoprojEventsV1alpha1Status'] = IoArgoprojEventsV1alpha1Status - - -class IoArgoprojEventsV1alpha1EventSourceStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'status': (IoArgoprojEventsV1alpha1Status,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_expr_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_expr_filter.py deleted file mode 100644 index 5416f95c7cc0..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_expr_filter.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField - globals()['IoArgoprojEventsV1alpha1PayloadField'] = IoArgoprojEventsV1alpha1PayloadField - - -class IoArgoprojEventsV1alpha1ExprFilter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'expr': (str,), # noqa: E501 - 'fields': ([IoArgoprojEventsV1alpha1PayloadField],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'expr': 'expr', # noqa: E501 - 'fields': 'fields', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ExprFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - expr (str): Expr refers to the expression that determines the outcome of the filter.. [optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1PayloadField]): Fields refers to set of keys that refer to the paths within event payload.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ExprFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - expr (str): Expr refers to the expression that determines the outcome of the filter.. [optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1PayloadField]): Fields refers to set of keys that refer to the paths within event payload.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_artifact.py deleted file mode 100644 index 20d7c7e94cf7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_artifact.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1FileArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'path': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'path': 'path', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileArtifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - path (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileArtifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - path (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_event_source.py deleted file mode 100644 index 9c181014e3ae..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_event_source.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WatchPathConfig'] = IoArgoprojEventsV1alpha1WatchPathConfig - - -class IoArgoprojEventsV1alpha1FileEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'event_type': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'polling': (bool,), # noqa: E501 - 'watch_path_config': (IoArgoprojEventsV1alpha1WatchPathConfig,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'event_type': 'eventType', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'polling': 'polling', # noqa: E501 - 'watch_path_config': 'watchPathConfig', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - polling (bool): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - polling (bool): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_generic_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_generic_event_source.py deleted file mode 100644 index b65fa8ee43e3..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_generic_event_source.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1GenericEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth_secret': (SecretKeySelector,), # noqa: E501 - 'config': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'insecure': (bool,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth_secret': 'authSecret', # noqa: E501 - 'config': 'config', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'insecure': 'insecure', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GenericEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_secret (SecretKeySelector): [optional] # noqa: E501 - config (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - insecure (bool): Insecure determines the type of connection.. [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - url (str): URL of the gRPC server that implements the event source.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GenericEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_secret (SecretKeySelector): [optional] # noqa: E501 - config (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - insecure (bool): Insecure determines the type of connection.. [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - url (str): URL of the gRPC server that implements the event source.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gerrit_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gerrit_event_source.py deleted file mode 100644 index bfc6c009aaeb..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gerrit_event_source.py +++ /dev/null @@ -1,301 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - - -class IoArgoprojEventsV1alpha1GerritEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 - 'delete_hook_on_finish': (bool,), # noqa: E501 - 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'gerrit_base_url': (str,), # noqa: E501 - 'hook_name': (str,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'projects': ([str],), # noqa: E501 - 'ssl_verify': (bool,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'delete_hook_on_finish': 'deleteHookOnFinish', # noqa: E501 - 'events': 'events', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'gerrit_base_url': 'gerritBaseURL', # noqa: E501 - 'hook_name': 'hookName', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'projects': 'projects', # noqa: E501 - 'ssl_verify': 'sslVerify', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GerritEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - gerrit_base_url (str): [optional] # noqa: E501 - hook_name (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - projects ([str]): List of project namespace paths like \"whynowy/test\".. [optional] # noqa: E501 - ssl_verify (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GerritEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - gerrit_base_url (str): [optional] # noqa: E501 - hook_name (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - projects ([str]): List of project namespace paths like \"whynowy/test\".. [optional] # noqa: E501 - ssl_verify (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_artifact.py deleted file mode 100644 index 5d87658c70c0..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_artifact.py +++ /dev/null @@ -1,301 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds - from argo_workflows.model.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1GitCreds'] = IoArgoprojEventsV1alpha1GitCreds - globals()['IoArgoprojEventsV1alpha1GitRemoteConfig'] = IoArgoprojEventsV1alpha1GitRemoteConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1GitArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'branch': (str,), # noqa: E501 - 'clone_directory': (str,), # noqa: E501 - 'creds': (IoArgoprojEventsV1alpha1GitCreds,), # noqa: E501 - 'file_path': (str,), # noqa: E501 - 'insecure_ignore_host_key': (bool,), # noqa: E501 - 'ref': (str,), # noqa: E501 - 'remote': (IoArgoprojEventsV1alpha1GitRemoteConfig,), # noqa: E501 - 'ssh_key_secret': (SecretKeySelector,), # noqa: E501 - 'tag': (str,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'branch': 'branch', # noqa: E501 - 'clone_directory': 'cloneDirectory', # noqa: E501 - 'creds': 'creds', # noqa: E501 - 'file_path': 'filePath', # noqa: E501 - 'insecure_ignore_host_key': 'insecureIgnoreHostKey', # noqa: E501 - 'ref': 'ref', # noqa: E501 - 'remote': 'remote', # noqa: E501 - 'ssh_key_secret': 'sshKeySecret', # noqa: E501 - 'tag': 'tag', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args 
- def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitArtifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - branch (str): [optional] # noqa: E501 - clone_directory (str): Directory to clone the repository. We clone complete directory because GitArtifact is not limited to any specific Git service providers. Hence we don't use any specific git provider client.. 
[optional] # noqa: E501 - creds (IoArgoprojEventsV1alpha1GitCreds): [optional] # noqa: E501 - file_path (str): [optional] # noqa: E501 - insecure_ignore_host_key (bool): [optional] # noqa: E501 - ref (str): [optional] # noqa: E501 - remote (IoArgoprojEventsV1alpha1GitRemoteConfig): [optional] # noqa: E501 - ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 - tag (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitArtifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - branch (str): [optional] # noqa: E501 - clone_directory (str): Directory to clone the repository. 
We clone complete directory because GitArtifact is not limited to any specific Git service providers. Hence we don't use any specific git provider client.. [optional] # noqa: E501 - creds (IoArgoprojEventsV1alpha1GitCreds): [optional] # noqa: E501 - file_path (str): [optional] # noqa: E501 - insecure_ignore_host_key (bool): [optional] # noqa: E501 - ref (str): [optional] # noqa: E501 - remote (IoArgoprojEventsV1alpha1GitRemoteConfig): [optional] # noqa: E501 - ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 - tag (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_creds.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_creds.py deleted file mode 100644 index 65eb7f70ab91..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_creds.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1GitCreds(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'password': (SecretKeySelector,), # noqa: E501 - 'username': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'password': 'password', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitCreds - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitCreds - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_remote_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_remote_config.py deleted file mode 100644 index d02db3151c8a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_remote_config.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1GitRemoteConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'urls': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'urls': 'urls', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitRemoteConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the remote to fetch from.. [optional] # noqa: E501 - urls ([str]): URLs the URLs of a remote repository. It must be non-empty. Fetch will always use the first URL, while push will use all of them.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitRemoteConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the remote to fetch from.. [optional] # noqa: E501 - urls ([str]): URLs the URLs of a remote repository. It must be non-empty. Fetch will always use the first URL, while push will use all of them.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_app_creds.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_app_creds.py deleted file mode 100644 index 79739c6ac094..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_app_creds.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1GithubAppCreds(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'app_id': (str,), # noqa: E501 - 'installation_id': (str,), # noqa: E501 - 'private_key': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'app_id': 'appID', # noqa: E501 - 'installation_id': 'installationID', # noqa: E501 - 'private_key': 'privateKey', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubAppCreds - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - app_id (str): [optional] # noqa: E501 - installation_id (str): [optional] # noqa: E501 - private_key (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubAppCreds - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - app_id (str): [optional] # noqa: E501 - installation_id (str): [optional] # noqa: E501 - private_key (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_event_source.py deleted file mode 100644 index 300c34b2da48..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_event_source.py +++ /dev/null @@ -1,337 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds - from argo_workflows.model.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1GithubAppCreds'] = IoArgoprojEventsV1alpha1GithubAppCreds - 
globals()['IoArgoprojEventsV1alpha1OwnedRepositories'] = IoArgoprojEventsV1alpha1OwnedRepositories - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1GithubEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'active': (bool,), # noqa: E501 - 'api_token': (SecretKeySelector,), # noqa: E501 - 'content_type': (str,), # noqa: E501 - 'delete_hook_on_finish': (bool,), # noqa: E501 - 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'github_app': (IoArgoprojEventsV1alpha1GithubAppCreds,), # noqa: E501 - 'github_base_url': (str,), # noqa: E501 - 'github_upload_url': (str,), # noqa: E501 - 'id': (str,), # noqa: E501 - 'insecure': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'organizations': ([str],), # noqa: E501 - 'owner': (str,), # noqa: E501 - 'repositories': ([IoArgoprojEventsV1alpha1OwnedRepositories],), # noqa: E501 - 'repository': (str,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - 'webhook_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'active': 'active', # noqa: E501 - 'api_token': 'apiToken', # noqa: E501 - 'content_type': 'contentType', # noqa: E501 - 'delete_hook_on_finish': 'deleteHookOnFinish', # noqa: E501 - 'events': 'events', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'github_app': 'githubApp', # noqa: E501 - 'github_base_url': 'githubBaseURL', # noqa: E501 - 'github_upload_url': 'githubUploadURL', # noqa: E501 - 'id': 'id', # noqa: E501 - 'insecure': 'insecure', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'organizations': 'organizations', # noqa: E501 - 'owner': 'owner', # noqa: E501 - 'repositories': 'repositories', # noqa: E501 - 'repository': 'repository', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - 'webhook_secret': 'webhookSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubEventSource - a model defined in OpenAPI - - 
Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - active (bool): [optional] # noqa: E501 - api_token (SecretKeySelector): [optional] # noqa: E501 - content_type (str): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - github_app (IoArgoprojEventsV1alpha1GithubAppCreds): [optional] # noqa: E501 - github_base_url (str): [optional] # noqa: E501 - github_upload_url (str): [optional] # noqa: E501 - id (str): [optional] # noqa: E501 - insecure (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - organizations ([str]): Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set.. [optional] # noqa: E501 - owner (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1OwnedRepositories]): Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set.. [optional] # noqa: E501 - repository (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - webhook_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - active (bool): [optional] # noqa: E501 - api_token (SecretKeySelector): [optional] # noqa: E501 - content_type (str): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - github_app (IoArgoprojEventsV1alpha1GithubAppCreds): [optional] # noqa: E501 - github_base_url (str): [optional] # noqa: E501 - github_upload_url (str): [optional] # noqa: E501 - id (str): [optional] # noqa: E501 - insecure (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - organizations ([str]): Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set.. [optional] # noqa: E501 - owner (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1OwnedRepositories]): Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set.. 
[optional] # noqa: E501 - repository (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - webhook_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gitlab_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gitlab_event_source.py deleted file mode 100644 index 87ad404dd030..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gitlab_event_source.py +++ /dev/null @@ -1,309 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1GitlabEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_token': (SecretKeySelector,), # noqa: E501 - 'delete_hook_on_finish': (bool,), # noqa: E501 - 'enable_ssl_verification': (bool,), # noqa: E501 - 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'gitlab_base_url': (str,), # noqa: E501 - 'groups': ([str],), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'project_id': (str,), # noqa: E501 - 'projects': ([str],), # noqa: E501 - 'secret_token': (SecretKeySelector,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_token': 'accessToken', # noqa: E501 - 'delete_hook_on_finish': 'deleteHookOnFinish', # noqa: E501 - 'enable_ssl_verification': 'enableSSLVerification', # noqa: E501 - 'events': 'events', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'gitlab_base_url': 'gitlabBaseURL', # noqa: E501 - 'groups': 'groups', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'project_id': 'projectID', # noqa: E501 - 'projects': 'projects', # noqa: E501 - 'secret_token': 'secretToken', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitlabEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_token (SecretKeySelector): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - enable_ssl_verification (bool): [optional] # noqa: E501 - events ([str]): Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - gitlab_base_url (str): [optional] # noqa: E501 - groups ([str]): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - project_id (str): [optional] # noqa: E501 - projects ([str]): [optional] # noqa: E501 - secret_token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitlabEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_token (SecretKeySelector): [optional] # noqa: E501 - delete_hook_on_finish (bool): [optional] # noqa: E501 - enable_ssl_verification (bool): [optional] # noqa: E501 - events ([str]): Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794.. [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - gitlab_base_url (str): [optional] # noqa: E501 - groups ([str]): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - project_id (str): [optional] # noqa: E501 - projects ([str]): [optional] # noqa: E501 - secret_token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_hdfs_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_hdfs_event_source.py deleted file mode 100644 index dc5fff99079d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_hdfs_event_source.py +++ /dev/null @@ -1,315 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WatchPathConfig'] = IoArgoprojEventsV1alpha1WatchPathConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1HDFSEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'addresses': ([str],), # noqa: E501 - 'check_interval': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'hdfs_user': (str,), # noqa: E501 - 'krb_c_cache_secret': (SecretKeySelector,), # noqa: E501 - 'krb_config_config_map': (ConfigMapKeySelector,), # noqa: E501 - 'krb_keytab_secret': (SecretKeySelector,), # noqa: E501 - 'krb_realm': (str,), # noqa: E501 - 'krb_service_principal_name': (str,), # noqa: E501 - 'krb_username': (str,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'type': (str,), # noqa: E501 - 'watch_path_config': (IoArgoprojEventsV1alpha1WatchPathConfig,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'addresses': 'addresses', # noqa: E501 - 'check_interval': 'checkInterval', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'hdfs_user': 'hdfsUser', # noqa: E501 - 'krb_c_cache_secret': 'krbCCacheSecret', # noqa: E501 - 'krb_config_config_map': 'krbConfigConfigMap', # noqa: E501 - 'krb_keytab_secret': 'krbKeytabSecret', # noqa: E501 - 'krb_realm': 'krbRealm', # noqa: E501 - 'krb_service_principal_name': 'krbServicePrincipalName', # noqa: E501 - 'krb_username': 'krbUsername', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'type': 'type', # noqa: E501 - 'watch_path_config': 'watchPathConfig', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HDFSEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - addresses ([str]): [optional] # noqa: E501 - check_interval (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. [optional] # noqa: E501 - krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 - krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 - krb_keytab_secret (SecretKeySelector): [optional] # noqa: E501 - krb_realm (str): KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.. 
[optional] # noqa: E501 - krb_service_principal_name (str): KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.. [optional] # noqa: E501 - krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HDFSEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - addresses ([str]): [optional] # noqa: E501 - check_interval (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. [optional] # noqa: E501 - krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 - krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 - krb_keytab_secret (SecretKeySelector): [optional] # noqa: E501 - krb_realm (str): KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - krb_service_principal_name (str): KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.. [optional] # noqa: E501 - krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_http_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_http_trigger.py deleted file mode 100644 index a972868e5139..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_http_trigger.py +++ /dev/null @@ -1,299 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1SecureHeader'] = IoArgoprojEventsV1alpha1SecureHeader - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - - -class IoArgoprojEventsV1alpha1HTTPTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'basic_auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 - 'headers': ({str: (str,)},), # noqa: E501 - 'method': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'secure_headers': ([IoArgoprojEventsV1alpha1SecureHeader],), # noqa: E501 - 'timeout': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'basic_auth': 'basicAuth', # noqa: E501 - 'headers': 'headers', # noqa: E501 - 'method': 'method', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'secure_headers': 'secureHeaders', # noqa: E501 - 'timeout': 'timeout', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HTTPTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic_auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - headers ({str: (str,)}): [optional] # noqa: E501 - method (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - secure_headers ([IoArgoprojEventsV1alpha1SecureHeader]): [optional] # noqa: E501 - timeout (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): URL refers to the URL to send HTTP request to.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HTTPTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic_auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - headers ({str: (str,)}): [optional] # noqa: E501 - method (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - secure_headers ([IoArgoprojEventsV1alpha1SecureHeader]): [optional] # noqa: E501 - timeout (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): URL refers to the URL to send HTTP request to.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_int64_or_string.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_int64_or_string.py deleted file mode 100644 index 710ec4d27a1a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_int64_or_string.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1Int64OrString(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'int64_val': (str,), # noqa: E501 - 'str_val': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'int64_val': 'int64Val', # noqa: E501 - 'str_val': 'strVal', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Int64OrString - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - int64_val (str): [optional] # noqa: E501 - str_val (str): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Int64OrString - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - int64_val (str): [optional] # noqa: E501 - str_val (str): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_k8_s_resource_policy.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_k8_s_resource_policy.py deleted file mode 100644 index 5461dac117b7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_k8_s_resource_policy.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - - -class IoArgoprojEventsV1alpha1K8SResourcePolicy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'error_on_backoff_timeout': (bool,), # noqa: E501 - 'labels': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'backoff': 'backoff', # noqa: E501 - 'error_on_backoff_timeout': 'errorOnBackoffTimeout', # noqa: E501 - 'labels': 'labels', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1K8SResourcePolicy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - error_on_backoff_timeout (bool): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1K8SResourcePolicy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - error_on_backoff_timeout (bool): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_consumer_group.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_consumer_group.py deleted file mode 100644 index 648263a68d7a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_consumer_group.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1KafkaConsumerGroup(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'group_name': (str,), # noqa: E501 - 'oldest': (bool,), # noqa: E501 - 'rebalance_strategy': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'group_name': 'groupName', # noqa: E501 - 'oldest': 'oldest', # noqa: E501 - 'rebalance_strategy': 'rebalanceStrategy', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaConsumerGroup - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - group_name (str): [optional] # noqa: E501 - oldest (bool): [optional] # noqa: E501 - rebalance_strategy (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaConsumerGroup - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - group_name (str): [optional] # noqa: E501 - oldest (bool): [optional] # noqa: E501 - rebalance_strategy (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_event_source.py deleted file mode 100644 index dc63af404426..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_event_source.py +++ /dev/null @@ -1,317 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup - from argo_workflows.model.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - 
globals()['IoArgoprojEventsV1alpha1KafkaConsumerGroup'] = IoArgoprojEventsV1alpha1KafkaConsumerGroup - globals()['IoArgoprojEventsV1alpha1SASLConfig'] = IoArgoprojEventsV1alpha1SASLConfig - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - - -class IoArgoprojEventsV1alpha1KafkaEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'config': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'consumer_group': (IoArgoprojEventsV1alpha1KafkaConsumerGroup,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'limit_events_per_second': (str,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'partition': (str,), # noqa: E501 - 'sasl': (IoArgoprojEventsV1alpha1SASLConfig,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'topic': (str,), # noqa: E501 - 'url': (str,), # noqa: E501 - 'version': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config': 'config', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # noqa: E501 - 'consumer_group': 'consumerGroup', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'limit_events_per_second': 'limitEventsPerSecond', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'partition': 'partition', # noqa: E501 - 'sasl': 'sasl', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'topic': 'topic', # noqa: E501 - 'url': 'url', # noqa: E501 - 'version': 'version', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - 
@classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config (str): Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional. 
[optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consumer_group (IoArgoprojEventsV1alpha1KafkaConsumerGroup): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - limit_events_per_second (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - partition (str): [optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config (str): Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. 
consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional. [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consumer_group (IoArgoprojEventsV1alpha1KafkaConsumerGroup): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - limit_events_per_second (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - partition (str): [optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_trigger.py deleted file mode 100644 index 08038859f24c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_trigger.py +++ /dev/null @@ -1,315 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1SASLConfig'] = IoArgoprojEventsV1alpha1SASLConfig - globals()['IoArgoprojEventsV1alpha1SchemaRegistryConfig'] = IoArgoprojEventsV1alpha1SchemaRegistryConfig - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - 
globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - - -class IoArgoprojEventsV1alpha1KafkaTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'compress': (bool,), # noqa: E501 - 'flush_frequency': (int,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'partition': (int,), # noqa: E501 - 'partitioning_key': (str,), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'required_acks': (int,), # noqa: E501 - 'sasl': (IoArgoprojEventsV1alpha1SASLConfig,), # noqa: E501 - 'schema_registry': (IoArgoprojEventsV1alpha1SchemaRegistryConfig,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'topic': (str,), # noqa: E501 - 'url': (str,), # noqa: E501 - 'version': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'compress': 'compress', # noqa: E501 - 'flush_frequency': 'flushFrequency', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'partition': 'partition', # noqa: E501 - 'partitioning_key': 'partitioningKey', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'required_acks': 'requiredAcks', # noqa: E501 - 'sasl': 'sasl', # noqa: E501 - 'schema_registry': 'schemaRegistry', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'topic': 'topic', # noqa: E501 - 'url': 'url', # noqa: E501 - 'version': 'version', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - compress (bool): [optional] # noqa: E501 - flush_frequency (int): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 - partition (int): [optional] # noqa: E501 - partitioning_key (str): The partitioning key for the messages put on the Kafka topic. +optional.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 - required_acks (int): RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional.. [optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - schema_registry (IoArgoprojEventsV1alpha1SchemaRegistryConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): URL of the Kafka broker, multiple URLs separated by comma.. [optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - compress (bool): [optional] # noqa: E501 - flush_frequency (int): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 - partition (int): [optional] # noqa: E501 - partitioning_key (str): The partitioning key for the messages put on the Kafka topic. +optional.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - required_acks (int): RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional.. [optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - schema_registry (IoArgoprojEventsV1alpha1SchemaRegistryConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): URL of the Kafka broker, multiple URLs separated by comma.. [optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_log_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_log_trigger.py deleted file mode 100644 index 1a51a4fbb283..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_log_trigger.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1LogTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'interval_seconds': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'interval_seconds': 'intervalSeconds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1LogTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - interval_seconds (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1LogTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - interval_seconds (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_metadata.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_metadata.py deleted file mode 100644 index 3a57a5d60c6a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_metadata.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1Metadata(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'annotations': ({str: (str,)},), # noqa: E501 - 'labels': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'annotations': 'annotations', # noqa: E501 - 'labels': 'labels', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Metadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Metadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_mqtt_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_mqtt_event_source.py deleted file mode 100644 index 91a7ce39220e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_mqtt_event_source.py +++ /dev/null @@ -1,299 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = 
IoArgoprojEventsV1alpha1TLSConfig - - -class IoArgoprojEventsV1alpha1MQTTEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 - 'client_id': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'topic': (str,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'client_id': 'clientId', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'topic': 'topic', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1MQTTEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - client_id (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1MQTTEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - client_id (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_auth.py deleted file mode 100644 index e066d7dae76c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_auth.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1NATSAuth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'basic': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 - 'credential': (SecretKeySelector,), # noqa: E501 - 'nkey': (SecretKeySelector,), # noqa: E501 - 'token': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'basic': 'basic', # noqa: E501 - 'credential': 'credential', # noqa: E501 - 'nkey': 'nkey', # noqa: E501 - 'token': 'token', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - credential (SecretKeySelector): [optional] # noqa: E501 - nkey (SecretKeySelector): [optional] # noqa: E501 - token (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - credential (SecretKeySelector): [optional] # noqa: E501 - nkey (SecretKeySelector): [optional] # noqa: E501 - token (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_events_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_events_source.py deleted file mode 100644 index 7534ec59b2eb..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_events_source.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1NATSAuth'] = IoArgoprojEventsV1alpha1NATSAuth - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = 
IoArgoprojEventsV1alpha1TLSConfig - - -class IoArgoprojEventsV1alpha1NATSEventsSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth': (IoArgoprojEventsV1alpha1NATSAuth,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'subject': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'subject': 'subject', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSEventsSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1NATSAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSEventsSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1NATSAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_trigger.py deleted file mode 100644 index fe91ddb51d42..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_trigger.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - - -class IoArgoprojEventsV1alpha1NATSTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'subject': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'subject': 'subject', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - subject (str): Name of the subject to put message on.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): URL of the NATS cluster.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - subject (str): Name of the subject to put message on.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - url (str): URL of the NATS cluster.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nsq_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nsq_event_source.py deleted file mode 100644 index f3e0513de680..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nsq_event_source.py +++ /dev/null @@ -1,293 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - - -class IoArgoprojEventsV1alpha1NSQEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'channel': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'host_address': (str,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'topic': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'channel': 'channel', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'host_address': 'hostAddress', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'topic': 'topic', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NSQEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - channel (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - host_address (str): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): Topic to subscribe to.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NSQEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - channel (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - host_address (str): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - topic (str): Topic to subscribe to.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_open_whisk_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_open_whisk_trigger.py deleted file mode 100644 index 893b248f29ba..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_open_whisk_trigger.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1OpenWhiskTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'action_name': (str,), # noqa: E501 - 'auth_token': (SecretKeySelector,), # noqa: E501 - 'host': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'version': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'action_name': 'actionName', # noqa: E501 - 'auth_token': 'authToken', # noqa: E501 - 'host': 'host', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'version': 'version', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OpenWhiskTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - action_name (str): Name of the action/function.. [optional] # noqa: E501 - auth_token (SecretKeySelector): [optional] # noqa: E501 - host (str): Host URL of the OpenWhisk.. [optional] # noqa: E501 - namespace (str): Namespace for the action. Defaults to \"_\". +optional.. [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OpenWhiskTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - action_name (str): Name of the action/function.. [optional] # noqa: E501 - auth_token (SecretKeySelector): [optional] # noqa: E501 - host (str): Host URL of the OpenWhisk.. [optional] # noqa: E501 - namespace (str): Namespace for the action. Defaults to \"_\". +optional.. [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 - version (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_owned_repositories.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_owned_repositories.py deleted file mode 100644 index fb7556ac1f1a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_owned_repositories.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1OwnedRepositories(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'names': ([str],), # noqa: E501 - 'owner': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'names': 'names', # noqa: E501 - 'owner': 'owner', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OwnedRepositories - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - names ([str]): [optional] # noqa: E501 - owner (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OwnedRepositories - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - names ([str]): [optional] # noqa: E501 - owner (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_payload_field.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_payload_field.py deleted file mode 100644 index 78ade3573f30..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_payload_field.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1PayloadField(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'path': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'path': 'path', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PayloadField - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name acts as key that holds the value at the path.. [optional] # noqa: E501 - path (str): Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PayloadField - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name acts as key that holds the value at the path.. [optional] # noqa: E501 - path (str): Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pub_sub_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pub_sub_event_source.py deleted file mode 100644 index 5badd3c68fac..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pub_sub_event_source.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1PubSubEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'credential_secret': (SecretKeySelector,), # noqa: E501 - 'delete_subscription_on_finish': (bool,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'project_id': (str,), # noqa: E501 - 'subscription_id': (str,), # noqa: E501 - 'topic': (str,), # noqa: E501 - 'topic_project_id': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'credential_secret': 'credentialSecret', # noqa: E501 - 'delete_subscription_on_finish': 'deleteSubscriptionOnFinish', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'project_id': 'projectID', # noqa: E501 - 'subscription_id': 'subscriptionID', # noqa: E501 - 'topic': 'topic', # noqa: E501 - 'topic_project_id': 'topicProjectID', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PubSubEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type 
(bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - credential_secret (SecretKeySelector): [optional] # noqa: E501 - delete_subscription_on_finish (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - project_id (str): [optional] # noqa: E501 - subscription_id (str): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - topic_project_id (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PubSubEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - credential_secret (SecretKeySelector): [optional] # noqa: E501 - delete_subscription_on_finish (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - project_id (str): [optional] # noqa: E501 - subscription_id (str): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - topic_project_id (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_event_source.py deleted file mode 100644 index 8d4a934bbe30..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_event_source.py +++ /dev/null @@ -1,319 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class 
IoArgoprojEventsV1alpha1PulsarEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth_athenz_params': ({str: (str,)},), # noqa: E501 - 'auth_athenz_secret': (SecretKeySelector,), # noqa: E501 - 'auth_token_secret': (SecretKeySelector,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'tls_allow_insecure_connection': (bool,), # noqa: E501 - 'tls_trust_certs_secret': (SecretKeySelector,), # noqa: E501 - 'tls_validate_hostname': (bool,), # noqa: E501 - 'topics': ([str],), # noqa: E501 - 'type': (str,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth_athenz_params': 'authAthenzParams', # noqa: E501 - 'auth_athenz_secret': 'authAthenzSecret', # noqa: E501 - 'auth_token_secret': 'authTokenSecret', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'tls_allow_insecure_connection': 'tlsAllowInsecureConnection', # noqa: E501 - 'tls_trust_certs_secret': 'tlsTrustCertsSecret', # noqa: E501 - 'tls_validate_hostname': 'tlsValidateHostname', # noqa: E501 - 'topics': 'topics', # noqa: E501 - 'type': 'type', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 - auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 - auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - tls_allow_insecure_connection (bool): [optional] # noqa: E501 - tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 - tls_validate_hostname (bool): [optional] # noqa: E501 - topics ([str]): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 - auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 - auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - tls_allow_insecure_connection (bool): [optional] # noqa: E501 - tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 - tls_validate_hostname (bool): [optional] # noqa: E501 - topics ([str]): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_trigger.py deleted file mode 100644 index 5ad4e589aac6..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_trigger.py +++ /dev/null @@ -1,311 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1PulsarTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth_athenz_params': ({str: (str,)},), # noqa: E501 - 'auth_athenz_secret': (SecretKeySelector,), # noqa: E501 - 'auth_token_secret': (SecretKeySelector,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'tls_allow_insecure_connection': (bool,), # noqa: E501 - 'tls_trust_certs_secret': (SecretKeySelector,), # noqa: E501 - 'tls_validate_hostname': (bool,), # noqa: E501 - 'topic': (str,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth_athenz_params': 'authAthenzParams', # noqa: E501 - 'auth_athenz_secret': 'authAthenzSecret', # noqa: E501 - 'auth_token_secret': 'authTokenSecret', # noqa: E501 - 'connection_backoff': 'connectionBackoff', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'payload': 'payload', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'tls_allow_insecure_connection': 'tlsAllowInsecureConnection', # noqa: E501 - 'tls_trust_certs_secret': 'tlsTrustCertsSecret', # noqa: E501 - 'tls_validate_hostname': 'tlsValidateHostname', # noqa: E501 - 'topic': 'topic', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 - auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 - auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - tls_allow_insecure_connection (bool): [optional] # noqa: E501 - tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 - tls_validate_hostname (bool): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 - auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 - auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - tls_allow_insecure_connection (bool): [optional] # noqa: E501 - tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 - tls_validate_hostname (bool): [optional] # noqa: E501 - topic (str): [optional] # noqa: E501 - url (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_rate_limit.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_rate_limit.py deleted file mode 100644 index ff82cc26642f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_rate_limit.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1RateLimit(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'requests_per_unit': (int,), # noqa: E501 - 'unit': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'requests_per_unit': 'requestsPerUnit', # noqa: E501 - 'unit': 'unit', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RateLimit - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - requests_per_unit (int): [optional] # noqa: E501 - unit (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RateLimit - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - requests_per_unit (int): [optional] # noqa: E501 - unit (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_event_source.py deleted file mode 100644 index e3cf938a9038..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_event_source.py +++ /dev/null @@ -1,301 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1RedisEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'channels': ([str],), # noqa: E501 - 'db': (int,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'host_address': (str,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'password': (SecretKeySelector,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'username': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'channels': 'channels', # noqa: E501 - 'db': 'db', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'host_address': 'hostAddress', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'password': 'password', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - channels ([str]): [optional] # noqa: E501 - db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - host_address (str): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - channels ([str]): [optional] # noqa: E501 - db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - host_address (str): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_stream_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_stream_event_source.py deleted file mode 100644 index 1b2515ffac9b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_stream_event_source.py +++ /dev/null @@ -1,301 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1RedisStreamEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'consumer_group': (str,), # noqa: E501 - 'db': (int,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'host_address': (str,), # noqa: E501 - 'max_msg_count_per_read': (int,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'password': (SecretKeySelector,), # noqa: E501 - 'streams': ([str],), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'username': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'consumer_group': 'consumerGroup', # noqa: E501 - 'db': 'db', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'host_address': 'hostAddress', # noqa: E501 - 'max_msg_count_per_read': 'maxMsgCountPerRead', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'password': 'password', # noqa: E501 - 'streams': 'streams', # noqa: E501 - 'tls': 'tls', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - 
@convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisStreamEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - consumer_group (str): [optional] # noqa: E501 - db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - host_address (str): [optional] # noqa: E501 - max_msg_count_per_read (int): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - streams ([str]): Streams to look for entries. XREADGROUP is used on all streams using a single consumer group.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisStreamEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - consumer_group (str): [optional] # noqa: E501 - db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - host_address (str): [optional] # noqa: E501 - max_msg_count_per_read (int): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - streams ([str]): Streams to look for entries. XREADGROUP is used on all streams using a single consumer group.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource.py deleted file mode 100644 index 41535c92a5ac..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource.py +++ /dev/null @@ -1,260 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1Resource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - ('value',): { - 'regex': { - 'pattern': r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', # noqa: E501 - }, - }, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Resource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Resource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_event_source.py deleted file mode 100644 index d26918d0740f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_event_source.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.group_version_resource import GroupVersionResource - from argo_workflows.model.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter - globals()['GroupVersionResource'] = GroupVersionResource - globals()['IoArgoprojEventsV1alpha1ResourceFilter'] = IoArgoprojEventsV1alpha1ResourceFilter - - -class IoArgoprojEventsV1alpha1ResourceEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'event_types': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1ResourceFilter,), # noqa: E501 - 'group_version_resource': (GroupVersionResource,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'event_types': 'eventTypes', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'group_version_resource': 'groupVersionResource', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_types ([str]): EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE.. [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1ResourceFilter): [optional] # noqa: E501 - group_version_resource (GroupVersionResource): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - event_types ([str]): EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1ResourceFilter): [optional] # noqa: E501 - group_version_resource (GroupVersionResource): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_filter.py deleted file mode 100644 index 26dd205a0f1f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_filter.py +++ /dev/null @@ -1,277 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector - globals()['IoArgoprojEventsV1alpha1Selector'] = IoArgoprojEventsV1alpha1Selector - - -class IoArgoprojEventsV1alpha1ResourceFilter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'after_start': (bool,), # noqa: E501 - 'created_by': (datetime,), # noqa: E501 - 'fields': ([IoArgoprojEventsV1alpha1Selector],), # noqa: E501 - 'labels': ([IoArgoprojEventsV1alpha1Selector],), # noqa: E501 - 'prefix': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'after_start': 'afterStart', # noqa: E501 - 'created_by': 'createdBy', # noqa: E501 - 'fields': 'fields', # noqa: E501 - 'labels': 'labels', # noqa: E501 - 'prefix': 'prefix', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - after_start (bool): [optional] # noqa: E501 - created_by (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 - labels ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 - prefix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - after_start (bool): [optional] # noqa: E501 - created_by (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 - labels ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 - prefix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_artifact.py deleted file mode 100644 index 0778b1d7ba76..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_artifact.py +++ /dev/null @@ -1,301 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1S3Bucket'] = IoArgoprojEventsV1alpha1S3Bucket - globals()['IoArgoprojEventsV1alpha1S3Filter'] = IoArgoprojEventsV1alpha1S3Filter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1S3Artifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_key': (SecretKeySelector,), # noqa: E501 - 'bucket': (IoArgoprojEventsV1alpha1S3Bucket,), # noqa: E501 - 'ca_certificate': (SecretKeySelector,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1S3Filter,), # noqa: E501 - 'insecure': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'region': (str,), # noqa: E501 - 'secret_key': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_key': 'accessKey', # noqa: E501 - 'bucket': 'bucket', # noqa: E501 - 'ca_certificate': 'caCertificate', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'events': 'events', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'insecure': 'insecure', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'region': 'region', # noqa: E501 - 'secret_key': 'secretKey', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Artifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - bucket (IoArgoprojEventsV1alpha1S3Bucket): [optional] # noqa: E501 - ca_certificate (SecretKeySelector): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1S3Filter): [optional] # noqa: E501 - insecure (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Artifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - bucket (IoArgoprojEventsV1alpha1S3Bucket): [optional] # noqa: E501 - ca_certificate (SecretKeySelector): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1S3Filter): [optional] # noqa: E501 - insecure (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_bucket.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_bucket.py deleted file mode 100644 index 56f61b075ced..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_bucket.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1S3Bucket(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'name': 'name', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Bucket - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - key (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Bucket - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - key (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_filter.py deleted file mode 100644 index a2410f720c48..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_filter.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1S3Filter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'prefix': (str,), # noqa: E501 - 'suffix': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'prefix': 'prefix', # noqa: E501 - 'suffix': 'suffix', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Filter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - prefix (str): [optional] # noqa: E501 - suffix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Filter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - prefix (str): [optional] # noqa: E501 - suffix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sasl_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sasl_config.py deleted file mode 100644 index 4e4bf3408aef..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sasl_config.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1SASLConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'mechanism': (str,), # noqa: E501 - 'password_secret': (SecretKeySelector,), # noqa: E501 - 'user_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'mechanism': 'mechanism', # noqa: E501 - 'password_secret': 'passwordSecret', # noqa: E501 - 'user_secret': 'userSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SASLConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mechanism (str): [optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - user_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SASLConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mechanism (str): [optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - user_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_schema_registry_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_schema_registry_config.py deleted file mode 100644 index 0c0a69d68fd4..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_schema_registry_config.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - - -class IoArgoprojEventsV1alpha1SchemaRegistryConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 - 'schema_id': (int,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth': 'auth', # noqa: E501 - 'schema_id': 'schemaId', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SchemaRegistryConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - schema_id (int): [optional] # noqa: E501 - url (str): Schema Registry URL.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SchemaRegistryConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - schema_id (int): [optional] # noqa: E501 - url (str): Schema Registry URL.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_secure_header.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_secure_header.py deleted file mode 100644 index 3b2bc72fc0f9..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_secure_header.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource - globals()['IoArgoprojEventsV1alpha1ValueFromSource'] = IoArgoprojEventsV1alpha1ValueFromSource - - -class IoArgoprojEventsV1alpha1SecureHeader(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'value_from': (IoArgoprojEventsV1alpha1ValueFromSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value_from': 'valueFrom', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SecureHeader - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - value_from (IoArgoprojEventsV1alpha1ValueFromSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SecureHeader - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - value_from (IoArgoprojEventsV1alpha1ValueFromSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_selector.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_selector.py deleted file mode 100644 index 54c76eeeaf3e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_selector.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1Selector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'operation': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'operation': 'operation', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Selector - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - key (str): [optional] # noqa: E501 - operation (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Selector - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - key (str): [optional] # noqa: E501 - operation (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor.py deleted file mode 100644 index 6931d2dc53e7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojEventsV1alpha1SensorSpec'] = IoArgoprojEventsV1alpha1SensorSpec - globals()['IoArgoprojEventsV1alpha1SensorStatus'] = IoArgoprojEventsV1alpha1SensorStatus - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojEventsV1alpha1Sensor(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojEventsV1alpha1SensorSpec,), # noqa: E501 - 'status': (IoArgoprojEventsV1alpha1SensorStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Sensor - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (IoArgoprojEventsV1alpha1SensorSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1SensorStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Sensor - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - spec (IoArgoprojEventsV1alpha1SensorSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1SensorStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_list.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_list.py deleted file mode 100644 index dee62a1f3f90..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_list.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor - from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor - globals()['ListMeta'] = ListMeta - - -class IoArgoprojEventsV1alpha1SensorList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'items': ([IoArgoprojEventsV1alpha1Sensor],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorList - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1Sensor]): [optional] # noqa: E501 - metadata (ListMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorList - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1Sensor]): [optional] # noqa: E501 - metadata (ListMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_spec.py deleted file mode 100644 index ed5683bcdb7b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_spec.py +++ /dev/null @@ -1,293 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency - from argo_workflows.model.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger - globals()['IoArgoprojEventsV1alpha1EventDependency'] = IoArgoprojEventsV1alpha1EventDependency - globals()['IoArgoprojEventsV1alpha1Template'] = IoArgoprojEventsV1alpha1Template - globals()['IoArgoprojEventsV1alpha1Trigger'] = IoArgoprojEventsV1alpha1Trigger - - -class IoArgoprojEventsV1alpha1SensorSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'dependencies': ([IoArgoprojEventsV1alpha1EventDependency],), # noqa: E501 - 'error_on_failed_round': (bool,), # noqa: E501 - 'event_bus_name': (str,), # noqa: E501 - 'logging_fields': ({str: (str,)},), # noqa: E501 - 'replicas': (int,), # noqa: E501 - 'revision_history_limit': (int,), # noqa: E501 - 'template': (IoArgoprojEventsV1alpha1Template,), # noqa: E501 - 'triggers': ([IoArgoprojEventsV1alpha1Trigger],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'dependencies': 'dependencies', # noqa: E501 - 'error_on_failed_round': 'errorOnFailedRound', # noqa: E501 - 'event_bus_name': 'eventBusName', # noqa: E501 - 'logging_fields': 'loggingFields', # noqa: E501 - 'replicas': 'replicas', # noqa: E501 - 'revision_history_limit': 'revisionHistoryLimit', # noqa: E501 - 'template': 'template', # noqa: E501 - 'triggers': 'triggers', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dependencies ([IoArgoprojEventsV1alpha1EventDependency]): Dependencies is a list of the events that this sensor is dependent on.. [optional] # noqa: E501 - error_on_failed_round (bool): ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed.. [optional] # noqa: E501 - event_bus_name (str): [optional] # noqa: E501 - logging_fields ({str: (str,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - revision_history_limit (int): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - triggers ([IoArgoprojEventsV1alpha1Trigger]): Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dependencies ([IoArgoprojEventsV1alpha1EventDependency]): Dependencies is a list of the events that this sensor is dependent on.. [optional] # noqa: E501 - error_on_failed_round (bool): ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed.. [optional] # noqa: E501 - event_bus_name (str): [optional] # noqa: E501 - logging_fields ({str: (str,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - revision_history_limit (int): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - triggers ([IoArgoprojEventsV1alpha1Trigger]): Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_status.py deleted file mode 100644 index 6dfa9291858e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_status.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status - globals()['IoArgoprojEventsV1alpha1Status'] = IoArgoprojEventsV1alpha1Status - - -class IoArgoprojEventsV1alpha1SensorStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'status': (IoArgoprojEventsV1alpha1Status,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_service.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_service.py deleted file mode 100644 index ca7c730af766..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_service.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.service_port import ServicePort - globals()['ServicePort'] = ServicePort - - -class IoArgoprojEventsV1alpha1Service(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'cluster_ip': (str,), # noqa: E501 - 'ports': ([ServicePort],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cluster_ip': 'clusterIP', # noqa: E501 - 'ports': 'ports', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Service - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cluster_ip (str): [optional] # noqa: E501 - ports ([ServicePort]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Service - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cluster_ip (str): [optional] # noqa: E501 - ports ([ServicePort]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sftp_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sftp_event_source.py deleted file mode 100644 index 79d29af7a1e8..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sftp_event_source.py +++ /dev/null @@ -1,297 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WatchPathConfig'] = IoArgoprojEventsV1alpha1WatchPathConfig - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1SFTPEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'address': (SecretKeySelector,), # noqa: E501 - 'event_type': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'password': (SecretKeySelector,), # noqa: E501 - 'poll_interval_duration': (str,), # noqa: E501 - 'ssh_key_secret': (SecretKeySelector,), # noqa: E501 - 'username': (SecretKeySelector,), # noqa: E501 - 'watch_path_config': (IoArgoprojEventsV1alpha1WatchPathConfig,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'address': 'address', # noqa: E501 - 'event_type': 'eventType', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'password': 'password', # noqa: E501 - 'poll_interval_duration': 'pollIntervalDuration', # noqa: E501 - 'ssh_key_secret': 'sshKeySecret', # noqa: E501 - 'username': 'username', # noqa: E501 - 'watch_path_config': 'watchPathConfig', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SFTPEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - address (SecretKeySelector): [optional] # noqa: E501 - event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - poll_interval_duration (str): [optional] # noqa: E501 - ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SFTPEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - address (SecretKeySelector): [optional] # noqa: E501 - event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - password (SecretKeySelector): [optional] # noqa: E501 - poll_interval_duration (str): [optional] # noqa: E501 - ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 - username (SecretKeySelector): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_event_source.py deleted file mode 100644 index 56a1713060ec..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_event_source.py +++ /dev/null @@ -1,281 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1SlackEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'signing_secret': (SecretKeySelector,), # noqa: E501 - 'token': (SecretKeySelector,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'filter': 'filter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'signing_secret': 'signingSecret', # noqa: E501 - 'token': 'token', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - signing_secret (SecretKeySelector): [optional] # noqa: E501 - token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - signing_secret (SecretKeySelector): [optional] # noqa: E501 - token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_sender.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_sender.py deleted file mode 100644 index 1692b09f190e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_sender.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1SlackSender(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'icon': (str,), # noqa: E501 - 'username': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'icon': 'icon', # noqa: E501 - 'username': 'username', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackSender - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - icon (str): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackSender - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - icon (str): [optional] # noqa: E501 - username (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_thread.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_thread.py deleted file mode 100644 index e8fb2e1d05ee..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_thread.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1SlackThread(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'broadcast_message_to_channel': (bool,), # noqa: E501 - 'message_aggregation_key': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'broadcast_message_to_channel': 'broadcastMessageToChannel', # noqa: E501 - 'message_aggregation_key': 'messageAggregationKey', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackThread - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - broadcast_message_to_channel (bool): [optional] # noqa: E501 - message_aggregation_key (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackThread - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - broadcast_message_to_channel (bool): [optional] # noqa: E501 - message_aggregation_key (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_trigger.py deleted file mode 100644 index dfcaee6f5a30..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_trigger.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1SlackSender'] = IoArgoprojEventsV1alpha1SlackSender - globals()['IoArgoprojEventsV1alpha1SlackThread'] = IoArgoprojEventsV1alpha1SlackThread - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['SecretKeySelector'] = SecretKeySelector - - -class 
IoArgoprojEventsV1alpha1SlackTrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'attachments': (str,), # noqa: E501 - 'blocks': (str,), # noqa: E501 - 'channel': (str,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'sender': (IoArgoprojEventsV1alpha1SlackSender,), # noqa: E501 - 'slack_token': (SecretKeySelector,), # noqa: E501 - 'thread': (IoArgoprojEventsV1alpha1SlackThread,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'attachments': 'attachments', # noqa: E501 - 'blocks': 'blocks', # noqa: E501 - 'channel': 'channel', # noqa: E501 - 'message': 'message', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'sender': 'sender', # noqa: E501 - 'slack_token': 'slackToken', # noqa: E501 - 'thread': 'thread', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - attachments (str): [optional] # noqa: E501 - blocks (str): [optional] # noqa: E501 - channel (str): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - sender (IoArgoprojEventsV1alpha1SlackSender): [optional] # noqa: E501 - slack_token (SecretKeySelector): [optional] # noqa: E501 - thread (IoArgoprojEventsV1alpha1SlackThread): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackTrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - attachments (str): [optional] # noqa: E501 - blocks (str): [optional] # noqa: E501 - channel (str): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - sender (IoArgoprojEventsV1alpha1SlackSender): [optional] # noqa: E501 - slack_token (SecretKeySelector): [optional] # noqa: E501 - thread (IoArgoprojEventsV1alpha1SlackThread): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sns_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sns_event_source.py deleted file mode 100644 index 1ee61864431e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sns_event_source.py +++ /dev/null @@ -1,301 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - globals()['SecretKeySelector'] = SecretKeySelector - - -class 
IoArgoprojEventsV1alpha1SNSEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_key': (SecretKeySelector,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'region': (str,), # noqa: E501 - 'role_arn': (str,), # noqa: E501 - 'secret_key': (SecretKeySelector,), # noqa: E501 - 'topic_arn': (str,), # noqa: E501 - 'validate_signature': (bool,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_key': 'accessKey', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'region': 'region', # noqa: E501 - 'role_arn': 'roleARN', # noqa: E501 - 'secret_key': 'secretKey', # noqa: E501 - 'topic_arn': 'topicArn', # noqa: E501 - 'validate_signature': 'validateSignature', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SNSEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - role_arn (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - topic_arn (str): [optional] # noqa: E501 - validate_signature (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SNSEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - role_arn (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - topic_arn (str): [optional] # noqa: E501 - validate_signature (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sqs_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sqs_event_source.py deleted file mode 100644 index 44d583d5dbe6..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sqs_event_source.py +++ /dev/null @@ -1,311 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1SQSEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'access_key': (SecretKeySelector,), # noqa: E501 - 'dlq': (bool,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'json_body': (bool,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'queue': (str,), # noqa: E501 - 'queue_account_id': (str,), # noqa: E501 - 'region': (str,), # noqa: E501 - 'role_arn': (str,), # noqa: E501 - 'secret_key': (SecretKeySelector,), # noqa: E501 - 'session_token': (SecretKeySelector,), # noqa: E501 - 'wait_time_seconds': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_key': 'accessKey', # noqa: E501 - 'dlq': 'dlq', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'json_body': 'jsonBody', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'queue': 'queue', # noqa: E501 - 'queue_account_id': 'queueAccountId', # noqa: E501 - 'region': 'region', # noqa: E501 - 'role_arn': 'roleARN', # noqa: E501 - 'secret_key': 'secretKey', # noqa: E501 - 'session_token': 'sessionToken', # noqa: E501 - 'wait_time_seconds': 'waitTimeSeconds', # noqa: E501 - } - - read_only_vars = { - } - - 
_composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SQSEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - dlq (bool): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue (str): [optional] # noqa: E501 - queue_account_id (str): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - role_arn (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - session_token (SecretKeySelector): [optional] # noqa: E501 - wait_time_seconds (str): WaitTimeSeconds is The duration (in seconds) for which the call waits for a message to arrive in the queue before returning.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SQSEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key (SecretKeySelector): [optional] # noqa: E501 - dlq (bool): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - json_body (bool): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - queue (str): [optional] # noqa: E501 - queue_account_id (str): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - role_arn (str): [optional] # noqa: E501 - secret_key (SecretKeySelector): [optional] # noqa: E501 - session_token (SecretKeySelector): [optional] # noqa: E501 - wait_time_seconds (str): WaitTimeSeconds is The duration (in seconds) for which the call waits for a message to arrive in the queue before returning.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py deleted file mode 100644 index 0f0a4e540842..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1ArtifactLocation'] = IoArgoprojEventsV1alpha1ArtifactLocation - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - - -class IoArgoprojEventsV1alpha1StandardK8STrigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'live_object': (bool,), # noqa: E501 - 'operation': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'patch_strategy': (str,), # noqa: E501 - 'source': (IoArgoprojEventsV1alpha1ArtifactLocation,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'live_object': 'liveObject', # noqa: E501 - 'operation': 'operation', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'patch_strategy': 'patchStrategy', # noqa: E501 - 'source': 'source', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StandardK8STrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - live_object (bool): [optional] # noqa: E501 - operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved K8s trigger object.. 
[optional] # noqa: E501 - patch_strategy (str): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StandardK8STrigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - live_object (bool): [optional] # noqa: E501 - operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved K8s trigger object.. 
[optional] # noqa: E501 - patch_strategy (str): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status.py deleted file mode 100644 index c29559f7eb18..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition - globals()['IoArgoprojEventsV1alpha1Condition'] = IoArgoprojEventsV1alpha1Condition - - -class IoArgoprojEventsV1alpha1Status(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'conditions': ([IoArgoprojEventsV1alpha1Condition],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'conditions': 'conditions', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Status - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - conditions ([IoArgoprojEventsV1alpha1Condition]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Status - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - conditions ([IoArgoprojEventsV1alpha1Condition]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status_policy.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status_policy.py deleted file mode 100644 index d27324e86210..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status_policy.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1StatusPolicy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'allow': ([int],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'allow': 'allow', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StatusPolicy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - allow ([int]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StatusPolicy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - allow ([int]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_event_source.py deleted file mode 100644 index 6b3a78fffbe6..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_event_source.py +++ /dev/null @@ -1,297 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_filter import IoArgoprojEventsV1alpha1StorageGridFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1StorageGridFilter'] = IoArgoprojEventsV1alpha1StorageGridFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1StorageGridEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'api_url': (str,), # noqa: E501 - 'auth_token': (SecretKeySelector,), # noqa: E501 - 'bucket': (str,), # noqa: E501 - 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1StorageGridFilter,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'region': (str,), # noqa: E501 - 'topic_arn': (str,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'api_url': 'apiURL', # noqa: E501 - 'auth_token': 'authToken', # noqa: E501 - 'bucket': 'bucket', # noqa: E501 - 'events': 'events', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'region': 'region', # noqa: E501 - 'topic_arn': 'topicArn', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_url (str): APIURL is the url of the storagegrid api.. [optional] # noqa: E501 - auth_token (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Name of the bucket to register notifications for.. [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1StorageGridFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - topic_arn (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_url (str): APIURL is the url of the storagegrid api.. [optional] # noqa: E501 - auth_token (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Name of the bucket to register notifications for.. [optional] # noqa: E501 - events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1StorageGridFilter): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - region (str): [optional] # noqa: E501 - topic_arn (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_filter.py deleted file mode 100644 index e15d005d6566..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_filter.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1StorageGridFilter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'prefix': (str,), # noqa: E501 - 'suffix': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'prefix': 'prefix', # noqa: E501 - 'suffix': 'suffix', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - prefix (str): [optional] # noqa: E501 - suffix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - prefix (str): [optional] # noqa: E501 - suffix (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_stripe_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_stripe_event_source.py deleted file mode 100644 index 60ce13c32c0a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_stripe_event_source.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1StripeEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'api_key': (SecretKeySelector,), # noqa: E501 - 'create_webhook': (bool,), # noqa: E501 - 'event_filter': ([str],), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'api_key': 'apiKey', # noqa: E501 - 'create_webhook': 'createWebhook', # noqa: E501 - 'event_filter': 'eventFilter', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StripeEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_key (SecretKeySelector): [optional] # noqa: E501 - create_webhook (bool): [optional] # noqa: E501 - event_filter ([str]): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StripeEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_key (SecretKeySelector): [optional] # noqa: E501 - create_webhook (bool): [optional] # noqa: E501 - event_filter ([str]): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_template.py deleted file mode 100644 index fe6dd7c6b393..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_template.py +++ /dev/null @@ -1,313 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.affinity import Affinity - from argo_workflows.model.container import Container - from argo_workflows.model.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata - from argo_workflows.model.local_object_reference import LocalObjectReference - from argo_workflows.model.pod_security_context import PodSecurityContext - from argo_workflows.model.toleration import Toleration - from argo_workflows.model.volume import Volume - globals()['Affinity'] = Affinity - globals()['Container'] = Container - globals()['IoArgoprojEventsV1alpha1Metadata'] = IoArgoprojEventsV1alpha1Metadata - globals()['LocalObjectReference'] = LocalObjectReference - globals()['PodSecurityContext'] = PodSecurityContext - globals()['Toleration'] = Toleration - 
globals()['Volume'] = Volume - - -class IoArgoprojEventsV1alpha1Template(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'affinity': (Affinity,), # noqa: E501 - 'container': (Container,), # noqa: E501 - 'image_pull_secrets': ([LocalObjectReference],), # noqa: E501 - 'metadata': (IoArgoprojEventsV1alpha1Metadata,), # noqa: E501 - 'node_selector': ({str: (str,)},), # noqa: E501 - 'priority': (int,), # noqa: E501 - 'priority_class_name': (str,), # noqa: E501 - 'security_context': (PodSecurityContext,), # noqa: E501 - 'service_account_name': (str,), # noqa: E501 - 'tolerations': ([Toleration],), # noqa: E501 - 'volumes': ([Volume],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'affinity': 'affinity', # noqa: E501 - 'container': 'container', # noqa: E501 - 'image_pull_secrets': 'imagePullSecrets', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'node_selector': 'nodeSelector', # noqa: E501 - 'priority': 'priority', # noqa: E501 - 'priority_class_name': 'priorityClassName', # noqa: E501 - 'security_context': 'securityContext', # noqa: E501 - 'service_account_name': 'serviceAccountName', # noqa: E501 - 'tolerations': 'tolerations', # noqa: E501 - 'volumes': 'volumes', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Template - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - affinity (Affinity): [optional] # noqa: E501 - container (Container): [optional] # noqa: E501 - image_pull_secrets ([LocalObjectReference]): [optional] # noqa: E501 - metadata (IoArgoprojEventsV1alpha1Metadata): [optional] # noqa: E501 - node_selector ({str: (str,)}): [optional] # noqa: E501 - priority (int): [optional] # noqa: E501 - priority_class_name (str): [optional] # noqa: E501 - security_context (PodSecurityContext): [optional] # noqa: E501 - service_account_name (str): [optional] # noqa: E501 - tolerations ([Toleration]): [optional] # noqa: E501 - volumes ([Volume]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Template - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - affinity (Affinity): [optional] # noqa: E501 - container (Container): [optional] # noqa: E501 - image_pull_secrets ([LocalObjectReference]): [optional] # noqa: E501 - metadata (IoArgoprojEventsV1alpha1Metadata): [optional] # noqa: E501 - node_selector ({str: (str,)}): [optional] # noqa: E501 - priority (int): [optional] # noqa: E501 - priority_class_name (str): [optional] # noqa: E501 - security_context (PodSecurityContext): [optional] # noqa: E501 - service_account_name (str): [optional] # noqa: E501 - tolerations ([Toleration]): [optional] # noqa: E501 - volumes ([Volume]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_time_filter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_time_filter.py deleted file mode 100644 index 8ad1034f178b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_time_filter.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1TimeFilter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'start': (str,), # noqa: E501 - 'stop': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'start': 'start', # noqa: E501 - 'stop': 'stop', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TimeFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - start (str): Start is the beginning of a time window in UTC. Before this time, events for this dependency are ignored. Format is hh:mm:ss.. [optional] # noqa: E501 - stop (str): Stop is the end of a time window in UTC. After or equal to this time, events for this dependency are ignored and Format is hh:mm:ss. If it is smaller than Start, it is treated as next day of Start (e.g.: 22:00:00-01:00:00 means 22:00:00-25:00:00).. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TimeFilter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - start (str): Start is the beginning of a time window in UTC. Before this time, events for this dependency are ignored. Format is hh:mm:ss.. [optional] # noqa: E501 - stop (str): Stop is the end of a time window in UTC. After or equal to this time, events for this dependency are ignored and Format is hh:mm:ss. If it is smaller than Start, it is treated as next day of Start (e.g.: 22:00:00-01:00:00 means 22:00:00-25:00:00).. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_tls_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_tls_config.py deleted file mode 100644 index 1fee1144e475..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_tls_config.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1TLSConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'ca_cert_secret': (SecretKeySelector,), # noqa: E501 - 'client_cert_secret': (SecretKeySelector,), # noqa: E501 - 'client_key_secret': (SecretKeySelector,), # noqa: E501 - 'insecure_skip_verify': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'ca_cert_secret': 'caCertSecret', # noqa: E501 - 'client_cert_secret': 'clientCertSecret', # noqa: E501 - 'client_key_secret': 'clientKeySecret', # noqa: E501 - 'insecure_skip_verify': 'insecureSkipVerify', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TLSConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ca_cert_secret (SecretKeySelector): [optional] # noqa: E501 - client_cert_secret (SecretKeySelector): [optional] # noqa: E501 - client_key_secret (SecretKeySelector): [optional] # noqa: E501 - insecure_skip_verify (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TLSConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ca_cert_secret (SecretKeySelector): [optional] # noqa: E501 - client_cert_secret (SecretKeySelector): [optional] # noqa: E501 - client_key_secret (SecretKeySelector): [optional] # noqa: E501 - insecure_skip_verify (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger.py deleted file mode 100644 index b00df6dfaf0c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger.py +++ /dev/null @@ -1,289 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1RateLimit'] = IoArgoprojEventsV1alpha1RateLimit - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = 
IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1TriggerPolicy'] = IoArgoprojEventsV1alpha1TriggerPolicy - globals()['IoArgoprojEventsV1alpha1TriggerTemplate'] = IoArgoprojEventsV1alpha1TriggerTemplate - - -class IoArgoprojEventsV1alpha1Trigger(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'at_least_once': (bool,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'policy': (IoArgoprojEventsV1alpha1TriggerPolicy,), # noqa: E501 - 'rate_limit': (IoArgoprojEventsV1alpha1RateLimit,), # noqa: E501 - 'retry_strategy': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'template': (IoArgoprojEventsV1alpha1TriggerTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'at_least_once': 'atLeastOnce', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'policy': 'policy', # noqa: E501 - 'rate_limit': 'rateLimit', # noqa: E501 - 'retry_strategy': 'retryStrategy', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Trigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - at_least_once (bool): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - policy (IoArgoprojEventsV1alpha1TriggerPolicy): [optional] # noqa: E501 - rate_limit (IoArgoprojEventsV1alpha1RateLimit): [optional] # noqa: E501 - retry_strategy (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1TriggerTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Trigger - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - at_least_once (bool): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - policy (IoArgoprojEventsV1alpha1TriggerPolicy): [optional] # noqa: E501 - rate_limit (IoArgoprojEventsV1alpha1RateLimit): [optional] # noqa: E501 - retry_strategy (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1TriggerTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter.py deleted file mode 100644 index 8c88dd6b7d4e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource - globals()['IoArgoprojEventsV1alpha1TriggerParameterSource'] = IoArgoprojEventsV1alpha1TriggerParameterSource - - -class IoArgoprojEventsV1alpha1TriggerParameter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'dest': (str,), # noqa: E501 - 'operation': (str,), # noqa: E501 - 'src': (IoArgoprojEventsV1alpha1TriggerParameterSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'dest': 'dest', # noqa: E501 - 'operation': 'operation', # noqa: E501 - 'src': 'src', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dest (str): Dest is the JSONPath of a resource key. A path is a series of keys separated by a dot. The colon character can be escaped with '.' The -1 key can be used to append a value to an existing array. See https://github.com/tidwall/sjson#path-syntax for more information about how this is used.. [optional] # noqa: E501 - operation (str): Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it.. [optional] # noqa: E501 - src (IoArgoprojEventsV1alpha1TriggerParameterSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameter - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dest (str): Dest is the JSONPath of a resource key. A path is a series of keys separated by a dot. The colon character can be escaped with '.' 
The -1 key can be used to append a value to an existing array. See https://github.com/tidwall/sjson#path-syntax for more information about how this is used.. [optional] # noqa: E501 - operation (str): Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it.. [optional] # noqa: E501 - src (IoArgoprojEventsV1alpha1TriggerParameterSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter_source.py deleted file mode 100644 index 84328054ecbb..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter_source.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1TriggerParameterSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'context_key': (str,), # noqa: E501 - 'context_template': (str,), # noqa: E501 - 'data_key': (str,), # noqa: E501 - 'data_template': (str,), # noqa: E501 - 'dependency_name': (str,), # noqa: E501 - 'use_raw_data': (bool,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'context_key': 'contextKey', # noqa: E501 - 'context_template': 'contextTemplate', # noqa: E501 - 'data_key': 'dataKey', # noqa: E501 - 'data_template': 'dataTemplate', # noqa: E501 - 'dependency_name': 'dependencyName', # noqa: E501 - 'use_raw_data': 'useRawData', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameterSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - context_key (str): ContextKey is the JSONPath of the event's (JSON decoded) context key ContextKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - context_template (str): [optional] # noqa: E501 - data_key (str): DataKey is the JSONPath of the event's (JSON decoded) data key DataKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - data_template (str): [optional] # noqa: E501 - dependency_name (str): DependencyName refers to the name of the dependency. The event which is stored for this dependency is used as payload for the parameterization. Make sure to refer to one of the dependencies you have defined under Dependencies list.. [optional] # noqa: E501 - use_raw_data (bool): [optional] # noqa: E501 - value (str): Value is the default literal value to use for this parameter source This is only used if the DataKey is invalid. If the DataKey is invalid and this is not defined, this param source will produce an error.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameterSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - context_key (str): ContextKey is the JSONPath of the event's (JSON decoded) context key ContextKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - context_template (str): [optional] # noqa: E501 - data_key (str): DataKey is the JSONPath of the event's (JSON decoded) data key DataKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. 
See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.. [optional] # noqa: E501 - data_template (str): [optional] # noqa: E501 - dependency_name (str): DependencyName refers to the name of the dependency. The event which is stored for this dependency is used as payload for the parameterization. Make sure to refer to one of the dependencies you have defined under Dependencies list.. [optional] # noqa: E501 - use_raw_data (bool): [optional] # noqa: E501 - value (str): Value is the default literal value to use for this parameter source This is only used if the DataKey is invalid. If the DataKey is invalid and this is not defined, this param source will produce an error.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_policy.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_policy.py deleted file mode 100644 index e2a0584d293a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_policy.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy - from argo_workflows.model.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy - globals()['IoArgoprojEventsV1alpha1K8SResourcePolicy'] = IoArgoprojEventsV1alpha1K8SResourcePolicy - globals()['IoArgoprojEventsV1alpha1StatusPolicy'] = IoArgoprojEventsV1alpha1StatusPolicy - - -class IoArgoprojEventsV1alpha1TriggerPolicy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'k8s': (IoArgoprojEventsV1alpha1K8SResourcePolicy,), # noqa: E501 - 'status': (IoArgoprojEventsV1alpha1StatusPolicy,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'k8s': 'k8s', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerPolicy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - k8s (IoArgoprojEventsV1alpha1K8SResourcePolicy): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1StatusPolicy): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerPolicy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - k8s (IoArgoprojEventsV1alpha1K8SResourcePolicy): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1StatusPolicy): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_template.py deleted file mode 100644 index 071237736196..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_template.py +++ /dev/null @@ -1,353 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria - from argo_workflows.model.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_http_trigger import IoArgoprojEventsV1alpha1HTTPTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger - from 
argo_workflows.model.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_standard_k8_s_trigger import IoArgoprojEventsV1alpha1StandardK8STrigger - globals()['IoArgoprojEventsV1alpha1AWSLambdaTrigger'] = IoArgoprojEventsV1alpha1AWSLambdaTrigger - globals()['IoArgoprojEventsV1alpha1ArgoWorkflowTrigger'] = IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - globals()['IoArgoprojEventsV1alpha1AzureEventHubsTrigger'] = IoArgoprojEventsV1alpha1AzureEventHubsTrigger - globals()['IoArgoprojEventsV1alpha1AzureServiceBusTrigger'] = IoArgoprojEventsV1alpha1AzureServiceBusTrigger - globals()['IoArgoprojEventsV1alpha1ConditionsResetCriteria'] = IoArgoprojEventsV1alpha1ConditionsResetCriteria - globals()['IoArgoprojEventsV1alpha1CustomTrigger'] = IoArgoprojEventsV1alpha1CustomTrigger - globals()['IoArgoprojEventsV1alpha1EmailTrigger'] = IoArgoprojEventsV1alpha1EmailTrigger - globals()['IoArgoprojEventsV1alpha1HTTPTrigger'] = IoArgoprojEventsV1alpha1HTTPTrigger - globals()['IoArgoprojEventsV1alpha1KafkaTrigger'] = IoArgoprojEventsV1alpha1KafkaTrigger - globals()['IoArgoprojEventsV1alpha1LogTrigger'] = IoArgoprojEventsV1alpha1LogTrigger - globals()['IoArgoprojEventsV1alpha1NATSTrigger'] = IoArgoprojEventsV1alpha1NATSTrigger - globals()['IoArgoprojEventsV1alpha1OpenWhiskTrigger'] = IoArgoprojEventsV1alpha1OpenWhiskTrigger - globals()['IoArgoprojEventsV1alpha1PulsarTrigger'] = IoArgoprojEventsV1alpha1PulsarTrigger - globals()['IoArgoprojEventsV1alpha1SlackTrigger'] = IoArgoprojEventsV1alpha1SlackTrigger - globals()['IoArgoprojEventsV1alpha1StandardK8STrigger'] = IoArgoprojEventsV1alpha1StandardK8STrigger - - -class 
IoArgoprojEventsV1alpha1TriggerTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'argo_workflow': (IoArgoprojEventsV1alpha1ArgoWorkflowTrigger,), # noqa: E501 - 'aws_lambda': (IoArgoprojEventsV1alpha1AWSLambdaTrigger,), # noqa: E501 - 'azure_event_hubs': (IoArgoprojEventsV1alpha1AzureEventHubsTrigger,), # noqa: E501 - 'azure_service_bus': (IoArgoprojEventsV1alpha1AzureServiceBusTrigger,), # noqa: E501 - 'conditions': (str,), # noqa: E501 - 'conditions_reset': ([IoArgoprojEventsV1alpha1ConditionsResetCriteria],), # noqa: E501 - 'custom': (IoArgoprojEventsV1alpha1CustomTrigger,), # noqa: E501 - 'email': (IoArgoprojEventsV1alpha1EmailTrigger,), # noqa: E501 - 'http': (IoArgoprojEventsV1alpha1HTTPTrigger,), # noqa: E501 - 'k8s': (IoArgoprojEventsV1alpha1StandardK8STrigger,), # noqa: E501 - 'kafka': (IoArgoprojEventsV1alpha1KafkaTrigger,), # noqa: E501 - 'log': (IoArgoprojEventsV1alpha1LogTrigger,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'nats': (IoArgoprojEventsV1alpha1NATSTrigger,), # noqa: E501 - 'open_whisk': (IoArgoprojEventsV1alpha1OpenWhiskTrigger,), # noqa: E501 - 'pulsar': (IoArgoprojEventsV1alpha1PulsarTrigger,), # noqa: E501 - 'slack': (IoArgoprojEventsV1alpha1SlackTrigger,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'argo_workflow': 'argoWorkflow', # noqa: E501 - 'aws_lambda': 'awsLambda', # noqa: E501 - 'azure_event_hubs': 'azureEventHubs', # noqa: E501 - 'azure_service_bus': 'azureServiceBus', # noqa: E501 - 'conditions': 'conditions', # noqa: E501 - 'conditions_reset': 'conditionsReset', # noqa: E501 - 'custom': 'custom', # noqa: E501 - 'email': 'email', # noqa: E501 - 'http': 'http', # noqa: E501 - 'k8s': 'k8s', # noqa: E501 - 'kafka': 'kafka', # noqa: E501 - 'log': 'log', # noqa: E501 - 'name': 'name', # noqa: E501 - 'nats': 'nats', # noqa: E501 - 'open_whisk': 'openWhisk', # noqa: E501 - 'pulsar': 'pulsar', # noqa: E501 - 'slack': 'slack', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - 
@convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerTemplate - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - argo_workflow (IoArgoprojEventsV1alpha1ArgoWorkflowTrigger): [optional] # noqa: E501 - aws_lambda (IoArgoprojEventsV1alpha1AWSLambdaTrigger): [optional] # noqa: E501 - azure_event_hubs (IoArgoprojEventsV1alpha1AzureEventHubsTrigger): [optional] # noqa: E501 - azure_service_bus (IoArgoprojEventsV1alpha1AzureServiceBusTrigger): [optional] # noqa: E501 - conditions (str): [optional] # noqa: E501 - conditions_reset ([IoArgoprojEventsV1alpha1ConditionsResetCriteria]): [optional] # noqa: E501 - custom (IoArgoprojEventsV1alpha1CustomTrigger): [optional] # noqa: E501 - email (IoArgoprojEventsV1alpha1EmailTrigger): [optional] # noqa: E501 - http (IoArgoprojEventsV1alpha1HTTPTrigger): [optional] # noqa: E501 - k8s (IoArgoprojEventsV1alpha1StandardK8STrigger): [optional] # noqa: E501 - kafka (IoArgoprojEventsV1alpha1KafkaTrigger): [optional] # noqa: E501 - log (IoArgoprojEventsV1alpha1LogTrigger): [optional] # noqa: E501 - name (str): Name is a unique name of the action to take.. [optional] # noqa: E501 - nats (IoArgoprojEventsV1alpha1NATSTrigger): [optional] # noqa: E501 - open_whisk (IoArgoprojEventsV1alpha1OpenWhiskTrigger): [optional] # noqa: E501 - pulsar (IoArgoprojEventsV1alpha1PulsarTrigger): [optional] # noqa: E501 - slack (IoArgoprojEventsV1alpha1SlackTrigger): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerTemplate - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - argo_workflow (IoArgoprojEventsV1alpha1ArgoWorkflowTrigger): [optional] # noqa: E501 - aws_lambda (IoArgoprojEventsV1alpha1AWSLambdaTrigger): [optional] # noqa: E501 - azure_event_hubs (IoArgoprojEventsV1alpha1AzureEventHubsTrigger): [optional] # noqa: E501 - azure_service_bus (IoArgoprojEventsV1alpha1AzureServiceBusTrigger): [optional] # noqa: E501 - conditions (str): [optional] # noqa: E501 - conditions_reset ([IoArgoprojEventsV1alpha1ConditionsResetCriteria]): [optional] # noqa: E501 - custom (IoArgoprojEventsV1alpha1CustomTrigger): [optional] # noqa: E501 - email (IoArgoprojEventsV1alpha1EmailTrigger): [optional] # noqa: E501 - http (IoArgoprojEventsV1alpha1HTTPTrigger): [optional] # noqa: E501 - k8s (IoArgoprojEventsV1alpha1StandardK8STrigger): [optional] # noqa: E501 - kafka (IoArgoprojEventsV1alpha1KafkaTrigger): [optional] # noqa: E501 - log (IoArgoprojEventsV1alpha1LogTrigger): [optional] # noqa: E501 - name (str): Name is a unique name of the action to take.. 
[optional] # noqa: E501 - nats (IoArgoprojEventsV1alpha1NATSTrigger): [optional] # noqa: E501 - open_whisk (IoArgoprojEventsV1alpha1OpenWhiskTrigger): [optional] # noqa: E501 - pulsar (IoArgoprojEventsV1alpha1PulsarTrigger): [optional] # noqa: E501 - slack (IoArgoprojEventsV1alpha1SlackTrigger): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_url_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_url_artifact.py deleted file mode 100644 index a8e63789e05d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_url_artifact.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1URLArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'path': (str,), # noqa: E501 - 'verify_cert': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'path': 'path', # noqa: E501 - 'verify_cert': 'verifyCert', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1URLArtifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - path (str): [optional] # noqa: E501 - verify_cert (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1URLArtifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - path (str): [optional] # noqa: E501 - verify_cert (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_value_from_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_value_from_source.py deleted file mode 100644 index dad95c34b8e5..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_value_from_source.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1ValueFromSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'config_map_key_ref': (ConfigMapKeySelector,), # noqa: E501 - 'secret_key_ref': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map_key_ref': 'configMapKeyRef', # noqa: E501 - 'secret_key_ref': 'secretKeyRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ValueFromSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ValueFromSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_watch_path_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_watch_path_config.py deleted file mode 100644 index dba84388ad4e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_watch_path_config.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojEventsV1alpha1WatchPathConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'directory': (str,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'path_regexp': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'directory': 'directory', # noqa: E501 - 'path': 'path', # noqa: E501 - 'path_regexp': 'pathRegexp', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WatchPathConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - directory (str): [optional] # noqa: E501 - path (str): [optional] # noqa: E501 - path_regexp (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WatchPathConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - directory (str): [optional] # noqa: E501 - path (str): [optional] # noqa: E501 - path_regexp (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_context.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_context.py deleted file mode 100644 index 921fbda41a0e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_context.py +++ /dev/null @@ -1,293 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojEventsV1alpha1WebhookContext(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'auth_secret': (SecretKeySelector,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'max_payload_size': (str,), # noqa: E501 - 'metadata': ({str: (str,)},), # noqa: E501 - 'method': (str,), # noqa: E501 - 'port': (str,), # noqa: E501 - 'server_cert_secret': (SecretKeySelector,), # noqa: E501 - 'server_key_secret': (SecretKeySelector,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'auth_secret': 'authSecret', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'max_payload_size': 'maxPayloadSize', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'method': 'method', # noqa: E501 - 'port': 'port', # noqa: E501 - 'server_cert_secret': 'serverCertSecret', # noqa: E501 - 'server_key_secret': 'serverKeySecret', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_secret (SecretKeySelector): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - max_payload_size (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - method (str): [optional] # noqa: E501 - port (str): Port on which HTTP server is listening for incoming events.. [optional] # noqa: E501 - server_cert_secret (SecretKeySelector): [optional] # noqa: E501 - server_key_secret (SecretKeySelector): [optional] # noqa: E501 - url (str): URL is the url of the server.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth_secret (SecretKeySelector): [optional] # noqa: E501 - endpoint (str): [optional] # noqa: E501 - max_payload_size (str): [optional] # noqa: E501 - metadata ({str: (str,)}): [optional] # noqa: E501 - method (str): [optional] # noqa: E501 - port (str): Port on which HTTP server is listening for incoming events.. [optional] # noqa: E501 - server_cert_secret (SecretKeySelector): [optional] # noqa: E501 - server_key_secret (SecretKeySelector): [optional] # noqa: E501 - url (str): URL is the url of the server.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_event_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_event_source.py deleted file mode 100644 index c6316912e390..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_event_source.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext - - -class IoArgoprojEventsV1alpha1WebhookEventSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'webhook_context': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'filter': 'filter', # noqa: E501 - 'webhook_context': 'webhookContext', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - webhook_context (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookEventSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - webhook_context (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_archive_strategy.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_archive_strategy.py deleted file mode 100644 index d9d39be8cb8f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_archive_strategy.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_tar_strategy import IoArgoprojWorkflowV1alpha1TarStrategy - globals()['IoArgoprojWorkflowV1alpha1TarStrategy'] = IoArgoprojWorkflowV1alpha1TarStrategy - - -class IoArgoprojWorkflowV1alpha1ArchiveStrategy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - '_none': (bool, date, datetime, dict, float, int, list, str, none_type,), # noqa: E501 - 'tar': (IoArgoprojWorkflowV1alpha1TarStrategy,), # noqa: E501 - 'zip': (bool, date, datetime, dict, float, int, list, str, none_type,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - '_none': 'none', # noqa: E501 - 'tar': 'tar', # noqa: E501 - 'zip': 'zip', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArchiveStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _none (bool, date, datetime, dict, float, int, list, str, none_type): NoneStrategy indicates to skip tar process and upload the files or directory tree as independent files. Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately.. [optional] # noqa: E501 - tar (IoArgoprojWorkflowV1alpha1TarStrategy): [optional] # noqa: E501 - zip (bool, date, datetime, dict, float, int, list, str, none_type): ZipStrategy will unzip zipped input artifacts. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArchiveStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _none (bool, date, datetime, dict, float, int, list, str, none_type): NoneStrategy indicates to skip tar process and upload the files or directory tree as independent files. 
Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately.. [optional] # noqa: E501 - tar (IoArgoprojWorkflowV1alpha1TarStrategy): [optional] # noqa: E501 - zip (bool, date, datetime, dict, float, int, list, str, none_type): ZipStrategy will unzip zipped input artifacts. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_arguments.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_arguments.py deleted file mode 100644 index 8e3c181bd4e6..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_arguments.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter - globals()['IoArgoprojWorkflowV1alpha1Artifact'] = IoArgoprojWorkflowV1alpha1Artifact - globals()['IoArgoprojWorkflowV1alpha1Parameter'] = IoArgoprojWorkflowV1alpha1Parameter - - -class IoArgoprojWorkflowV1alpha1Arguments(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'artifacts': ([IoArgoprojWorkflowV1alpha1Artifact],), # noqa: E501 - 'parameters': ([IoArgoprojWorkflowV1alpha1Parameter],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifacts': 'artifacts', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Arguments - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts ([IoArgoprojWorkflowV1alpha1Artifact]): Artifacts is the list of artifacts to pass to the template or workflow. [optional] # noqa: E501 - parameters ([IoArgoprojWorkflowV1alpha1Parameter]): Parameters is the list of parameters to pass to the template or workflow. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Arguments - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts ([IoArgoprojWorkflowV1alpha1Artifact]): Artifacts is the list of artifacts to pass to the template or workflow. 
[optional] # noqa: E501 - parameters ([IoArgoprojWorkflowV1alpha1Parameter]): Parameters is the list of parameters to pass to the template or workflow. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_art_gc_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_art_gc_status.py deleted file mode 100644 index 2efbd182e401..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_art_gc_status.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1ArtGCStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'not_specified': (bool,), # noqa: E501 - 'pods_recouped': ({str: (bool,)},), # noqa: E501 - 'strategies_processed': ({str: (bool,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'not_specified': 'notSpecified', # noqa: E501 - 'pods_recouped': 'podsRecouped', # noqa: E501 - 'strategies_processed': 'strategiesProcessed', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtGCStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - not_specified (bool): if this is true, we already checked to see if we need to do it and we don't. [optional] # noqa: E501 - pods_recouped ({str: (bool,)}): have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once. [optional] # noqa: E501 - strategies_processed ({str: (bool,)}): have Pods been started to perform this strategy? (enables us not to re-process what we've already done). [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtGCStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - not_specified (bool): if this is true, we already checked to see if we need to do it and we don't. [optional] # noqa: E501 - pods_recouped ({str: (bool,)}): have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once. [optional] # noqa: E501 - strategies_processed ({str: (bool,)}): have Pods been started to perform this strategy? (enables us not to re-process what we've already done). [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py deleted file mode 100644 index 0f3b949ca659..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact.py +++ /dev/null @@ -1,371 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact - globals()['IoArgoprojWorkflowV1alpha1ArchiveStrategy'] = 
IoArgoprojWorkflowV1alpha1ArchiveStrategy - globals()['IoArgoprojWorkflowV1alpha1ArtifactGC'] = IoArgoprojWorkflowV1alpha1ArtifactGC - globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifact'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - globals()['IoArgoprojWorkflowV1alpha1AzureArtifact'] = IoArgoprojWorkflowV1alpha1AzureArtifact - globals()['IoArgoprojWorkflowV1alpha1GCSArtifact'] = IoArgoprojWorkflowV1alpha1GCSArtifact - globals()['IoArgoprojWorkflowV1alpha1GitArtifact'] = IoArgoprojWorkflowV1alpha1GitArtifact - globals()['IoArgoprojWorkflowV1alpha1HDFSArtifact'] = IoArgoprojWorkflowV1alpha1HDFSArtifact - globals()['IoArgoprojWorkflowV1alpha1HTTPArtifact'] = IoArgoprojWorkflowV1alpha1HTTPArtifact - globals()['IoArgoprojWorkflowV1alpha1OSSArtifact'] = IoArgoprojWorkflowV1alpha1OSSArtifact - globals()['IoArgoprojWorkflowV1alpha1RawArtifact'] = IoArgoprojWorkflowV1alpha1RawArtifact - globals()['IoArgoprojWorkflowV1alpha1S3Artifact'] = IoArgoprojWorkflowV1alpha1S3Artifact - - -class IoArgoprojWorkflowV1alpha1Artifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'archive': (IoArgoprojWorkflowV1alpha1ArchiveStrategy,), # noqa: E501 - 'archive_logs': (bool,), # noqa: E501 - 'artifact_gc': (IoArgoprojWorkflowV1alpha1ArtifactGC,), # noqa: E501 - 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact,), # noqa: E501 - 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifact,), # noqa: E501 - 'deleted': (bool,), # noqa: E501 - '_from': (str,), # noqa: E501 - 'from_expression': (str,), # noqa: E501 - 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifact,), # noqa: E501 - 'git': (IoArgoprojWorkflowV1alpha1GitArtifact,), # noqa: E501 - 'global_name': (str,), # noqa: E501 - 'hdfs': (IoArgoprojWorkflowV1alpha1HDFSArtifact,), # noqa: E501 - 'http': (IoArgoprojWorkflowV1alpha1HTTPArtifact,), # noqa: E501 - 'mode': (int,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - 'oss': (IoArgoprojWorkflowV1alpha1OSSArtifact,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'raw': (IoArgoprojWorkflowV1alpha1RawArtifact,), # noqa: E501 - 'recurse_mode': (bool,), # noqa: E501 - 's3': (IoArgoprojWorkflowV1alpha1S3Artifact,), # noqa: E501 - 'sub_path': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { 
- 'name': 'name', # noqa: E501 - 'archive': 'archive', # noqa: E501 - 'archive_logs': 'archiveLogs', # noqa: E501 - 'artifact_gc': 'artifactGC', # noqa: E501 - 'artifactory': 'artifactory', # noqa: E501 - 'azure': 'azure', # noqa: E501 - 'deleted': 'deleted', # noqa: E501 - '_from': 'from', # noqa: E501 - 'from_expression': 'fromExpression', # noqa: E501 - 'gcs': 'gcs', # noqa: E501 - 'git': 'git', # noqa: E501 - 'global_name': 'globalName', # noqa: E501 - 'hdfs': 'hdfs', # noqa: E501 - 'http': 'http', # noqa: E501 - 'mode': 'mode', # noqa: E501 - 'optional': 'optional', # noqa: E501 - 'oss': 'oss', # noqa: E501 - 'path': 'path', # noqa: E501 - 'raw': 'raw', # noqa: E501 - 'recurse_mode': 'recurseMode', # noqa: E501 - 's3': 's3', # noqa: E501 - 'sub_path': 'subPath', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Artifact - a model defined in OpenAPI - - Args: - name (str): name of the artifact. must be unique within a template's inputs/outputs. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 - artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 - deleted (bool): Has this been deleted?. [optional] # noqa: E501 - _from (str): From allows an artifact to reference an artifact from a previous step. [optional] # noqa: E501 - from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. [optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 - git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 - global_name (str): GlobalName exports an output artifact to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts. 
[optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTPArtifact): [optional] # noqa: E501 - mode (int): mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts.. [optional] # noqa: E501 - optional (bool): Make Artifacts optional, if Artifacts doesn't generate or exist. [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifact): [optional] # noqa: E501 - path (str): Path is the container path to the artifact. [optional] # noqa: E501 - raw (IoArgoprojWorkflowV1alpha1RawArtifact): [optional] # noqa: E501 - recurse_mode (bool): If mode is set, apply the permission recursively into the artifact if it is a folder. [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3Artifact): [optional] # noqa: E501 - sub_path (str): SubPath allows an artifact to be sourced from a subpath within the specified source. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Artifact - a model defined in OpenAPI - - Args: - name (str): name of the artifact. must be unique within a template's inputs/outputs. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 - artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 - deleted (bool): Has this been deleted?. [optional] # noqa: E501 - _from (str): From allows an artifact to reference an artifact from a previous step. [optional] # noqa: E501 - from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. [optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 - git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 - global_name (str): GlobalName exports an output artifact to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts. 
[optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTPArtifact): [optional] # noqa: E501 - mode (int): mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts.. [optional] # noqa: E501 - optional (bool): Make Artifacts optional, if Artifacts doesn't generate or exist. [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifact): [optional] # noqa: E501 - path (str): Path is the container path to the artifact. [optional] # noqa: E501 - raw (IoArgoprojWorkflowV1alpha1RawArtifact): [optional] # noqa: E501 - recurse_mode (bool): If mode is set, apply the permission recursively into the artifact if it is a folder. [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3Artifact): [optional] # noqa: E501 - sub_path (str): SubPath allows an artifact to be sourced from a subpath within the specified source. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc.py deleted file mode 100644 index 59dcfb17013e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata - globals()['IoArgoprojWorkflowV1alpha1Metadata'] = IoArgoprojWorkflowV1alpha1Metadata - - -class IoArgoprojWorkflowV1alpha1ArtifactGC(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'pod_metadata': (IoArgoprojWorkflowV1alpha1Metadata,), # noqa: E501 - 'service_account_name': (str,), # noqa: E501 - 'strategy': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'pod_metadata': 'podMetadata', # noqa: E501 - 'service_account_name': 'serviceAccountName', # noqa: E501 - 'strategy': 'strategy', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - service_account_name (str): ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion. [optional] # noqa: E501 - strategy (str): Strategy is the strategy to use.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - service_account_name (str): ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion. [optional] # noqa: E501 - strategy (str): Strategy is the strategy to use.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py deleted file mode 100644 index 980053beb018..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec - globals()['IoArgoprojWorkflowV1alpha1ArtifactNodeSpec'] = IoArgoprojWorkflowV1alpha1ArtifactNodeSpec - - -class IoArgoprojWorkflowV1alpha1ArtifactGCSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'artifacts_by_node': ({str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifacts_by_node': 'artifactsByNode', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactGCSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)}): ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactGCSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)}): ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_status.py deleted file mode 100644 index c572129cf8fe..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_gc_status.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result_node_status import IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus - globals()['IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus'] = IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus - - -class IoArgoprojWorkflowV1alpha1ArtifactGCStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'artifact_results_by_node': ({str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifact_results_by_node': 'artifactResultsByNode', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactGCStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_results_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)}): ArtifactResultsByNode maps Node name to result. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactGCStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_results_by_node ({str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)}): ArtifactResultsByNode maps Node name to result. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py deleted file mode 100644 index ae0839bfa657..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_location.py +++ /dev/null @@ -1,313 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact - globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifact'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - globals()['IoArgoprojWorkflowV1alpha1AzureArtifact'] = IoArgoprojWorkflowV1alpha1AzureArtifact - globals()['IoArgoprojWorkflowV1alpha1GCSArtifact'] = IoArgoprojWorkflowV1alpha1GCSArtifact - 
globals()['IoArgoprojWorkflowV1alpha1GitArtifact'] = IoArgoprojWorkflowV1alpha1GitArtifact - globals()['IoArgoprojWorkflowV1alpha1HDFSArtifact'] = IoArgoprojWorkflowV1alpha1HDFSArtifact - globals()['IoArgoprojWorkflowV1alpha1HTTPArtifact'] = IoArgoprojWorkflowV1alpha1HTTPArtifact - globals()['IoArgoprojWorkflowV1alpha1OSSArtifact'] = IoArgoprojWorkflowV1alpha1OSSArtifact - globals()['IoArgoprojWorkflowV1alpha1RawArtifact'] = IoArgoprojWorkflowV1alpha1RawArtifact - globals()['IoArgoprojWorkflowV1alpha1S3Artifact'] = IoArgoprojWorkflowV1alpha1S3Artifact - - -class IoArgoprojWorkflowV1alpha1ArtifactLocation(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'archive_logs': (bool,), # noqa: E501 - 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact,), # noqa: E501 - 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifact,), # noqa: E501 - 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifact,), # noqa: E501 - 'git': (IoArgoprojWorkflowV1alpha1GitArtifact,), # noqa: E501 - 'hdfs': (IoArgoprojWorkflowV1alpha1HDFSArtifact,), # noqa: E501 - 'http': (IoArgoprojWorkflowV1alpha1HTTPArtifact,), # noqa: E501 - 'oss': (IoArgoprojWorkflowV1alpha1OSSArtifact,), # noqa: E501 - 'raw': (IoArgoprojWorkflowV1alpha1RawArtifact,), # noqa: E501 - 's3': (IoArgoprojWorkflowV1alpha1S3Artifact,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'archive_logs': 'archiveLogs', # noqa: E501 - 'artifactory': 'artifactory', # noqa: E501 - 'azure': 'azure', # noqa: E501 - 'gcs': 'gcs', # noqa: E501 - 'git': 'git', # noqa: E501 - 'hdfs': 'hdfs', # noqa: E501 - 'http': 'http', # noqa: E501 - 'oss': 'oss', # noqa: E501 - 'raw': 'raw', # noqa: E501 - 's3': 's3', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactLocation - a model defined in OpenAPI - - Keyword Args: - 
_check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. 
[optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 - git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTPArtifact): [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifact): [optional] # noqa: E501 - raw (IoArgoprojWorkflowV1alpha1RawArtifact): [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3Artifact): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactLocation - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. 
[optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 - git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTPArtifact): [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifact): [optional] # noqa: E501 - raw (IoArgoprojWorkflowV1alpha1RawArtifact): [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3Artifact): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_node_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_node_spec.py deleted file mode 100644 index 9a9738702e47..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_node_spec.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation - globals()['IoArgoprojWorkflowV1alpha1Artifact'] = IoArgoprojWorkflowV1alpha1Artifact - globals()['IoArgoprojWorkflowV1alpha1ArtifactLocation'] = IoArgoprojWorkflowV1alpha1ArtifactLocation - - -class IoArgoprojWorkflowV1alpha1ArtifactNodeSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'archive_location': (IoArgoprojWorkflowV1alpha1ArtifactLocation,), # noqa: E501 - 'artifacts': ({str: (IoArgoprojWorkflowV1alpha1Artifact,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'archive_location': 'archiveLocation', # noqa: E501 - 'artifacts': 'artifacts', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactNodeSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive_location (IoArgoprojWorkflowV1alpha1ArtifactLocation): [optional] # noqa: E501 - artifacts ({str: (IoArgoprojWorkflowV1alpha1Artifact,)}): Artifacts maps artifact name to Artifact description. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactNodeSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive_location (IoArgoprojWorkflowV1alpha1ArtifactLocation): [optional] # noqa: E501 - artifacts ({str: (IoArgoprojWorkflowV1alpha1Artifact,)}): Artifacts maps artifact name to Artifact description. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py deleted file mode 100644 index ff235ad19c04..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_paths.py +++ /dev/null @@ -1,371 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact - from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact - globals()['IoArgoprojWorkflowV1alpha1ArchiveStrategy'] = IoArgoprojWorkflowV1alpha1ArchiveStrategy - globals()['IoArgoprojWorkflowV1alpha1ArtifactGC'] = IoArgoprojWorkflowV1alpha1ArtifactGC - globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifact'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - globals()['IoArgoprojWorkflowV1alpha1AzureArtifact'] = IoArgoprojWorkflowV1alpha1AzureArtifact - globals()['IoArgoprojWorkflowV1alpha1GCSArtifact'] = IoArgoprojWorkflowV1alpha1GCSArtifact - globals()['IoArgoprojWorkflowV1alpha1GitArtifact'] = IoArgoprojWorkflowV1alpha1GitArtifact - globals()['IoArgoprojWorkflowV1alpha1HDFSArtifact'] = IoArgoprojWorkflowV1alpha1HDFSArtifact - globals()['IoArgoprojWorkflowV1alpha1HTTPArtifact'] = IoArgoprojWorkflowV1alpha1HTTPArtifact - globals()['IoArgoprojWorkflowV1alpha1OSSArtifact'] = IoArgoprojWorkflowV1alpha1OSSArtifact - globals()['IoArgoprojWorkflowV1alpha1RawArtifact'] = IoArgoprojWorkflowV1alpha1RawArtifact - globals()['IoArgoprojWorkflowV1alpha1S3Artifact'] = IoArgoprojWorkflowV1alpha1S3Artifact - - -class IoArgoprojWorkflowV1alpha1ArtifactPaths(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'archive': (IoArgoprojWorkflowV1alpha1ArchiveStrategy,), # noqa: E501 - 'archive_logs': (bool,), # noqa: E501 - 'artifact_gc': (IoArgoprojWorkflowV1alpha1ArtifactGC,), # noqa: E501 - 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact,), # noqa: E501 - 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifact,), # noqa: E501 - 'deleted': (bool,), # noqa: E501 - '_from': (str,), # noqa: E501 - 'from_expression': (str,), # noqa: E501 - 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifact,), # noqa: E501 - 'git': (IoArgoprojWorkflowV1alpha1GitArtifact,), # noqa: E501 - 'global_name': (str,), # noqa: E501 - 'hdfs': (IoArgoprojWorkflowV1alpha1HDFSArtifact,), # noqa: E501 - 'http': (IoArgoprojWorkflowV1alpha1HTTPArtifact,), # noqa: E501 - 'mode': (int,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - 'oss': (IoArgoprojWorkflowV1alpha1OSSArtifact,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'raw': (IoArgoprojWorkflowV1alpha1RawArtifact,), # noqa: E501 - 'recurse_mode': (bool,), # noqa: E501 - 's3': (IoArgoprojWorkflowV1alpha1S3Artifact,), # noqa: E501 - 'sub_path': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'archive': 'archive', # noqa: E501 - 'archive_logs': 'archiveLogs', # noqa: E501 - 'artifact_gc': 'artifactGC', # noqa: E501 - 'artifactory': 'artifactory', # noqa: E501 - 'azure': 'azure', # noqa: E501 - 'deleted': 'deleted', # noqa: E501 - '_from': 'from', # noqa: E501 - 'from_expression': 'fromExpression', # noqa: E501 - 'gcs': 'gcs', # noqa: E501 - 'git': 'git', # noqa: E501 - 'global_name': 'globalName', # noqa: E501 - 'hdfs': 'hdfs', # noqa: E501 - 'http': 'http', # noqa: E501 - 'mode': 'mode', # noqa: E501 - 'optional': 'optional', # noqa: E501 - 'oss': 'oss', # noqa: E501 - 'path': 'path', # noqa: E501 - 'raw': 'raw', # noqa: E501 - 'recurse_mode': 'recurseMode', # noqa: E501 - 's3': 's3', # noqa: 
E501 - 'sub_path': 'subPath', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactPaths - a model defined in OpenAPI - - Args: - name (str): name of the artifact. must be unique within a template's inputs/outputs. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. 
[optional] # noqa: E501 - artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 - deleted (bool): Has this been deleted?. [optional] # noqa: E501 - _from (str): From allows an artifact to reference an artifact from a previous step. [optional] # noqa: E501 - from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. [optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 - git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 - global_name (str): GlobalName exports an output artifact to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts. [optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTPArtifact): [optional] # noqa: E501 - mode (int): mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts.. [optional] # noqa: E501 - optional (bool): Make Artifacts optional, if Artifacts doesn't generate or exist. [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifact): [optional] # noqa: E501 - path (str): Path is the container path to the artifact. [optional] # noqa: E501 - raw (IoArgoprojWorkflowV1alpha1RawArtifact): [optional] # noqa: E501 - recurse_mode (bool): If mode is set, apply the permission recursively into the artifact if it is a folder. [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3Artifact): [optional] # noqa: E501 - sub_path (str): SubPath allows an artifact to be sourced from a subpath within the specified source. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactPaths - a model defined in OpenAPI - - Args: - name (str): name of the artifact. must be unique within a template's inputs/outputs. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive (IoArgoprojWorkflowV1alpha1ArchiveStrategy): [optional] # noqa: E501 - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 - artifact_gc (IoArgoprojWorkflowV1alpha1ArtifactGC): [optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifact): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifact): [optional] # noqa: E501 - deleted (bool): Has this been deleted?. [optional] # noqa: E501 - _from (str): From allows an artifact to reference an artifact from a previous step. [optional] # noqa: E501 - from_expression (str): FromExpression, if defined, is evaluated to specify the value for the artifact. 
[optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifact): [optional] # noqa: E501 - git (IoArgoprojWorkflowV1alpha1GitArtifact): [optional] # noqa: E501 - global_name (str): GlobalName exports an output artifact to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts. [optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifact): [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTPArtifact): [optional] # noqa: E501 - mode (int): mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts.. [optional] # noqa: E501 - optional (bool): Make Artifacts optional, if Artifacts doesn't generate or exist. [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifact): [optional] # noqa: E501 - path (str): Path is the container path to the artifact. [optional] # noqa: E501 - raw (IoArgoprojWorkflowV1alpha1RawArtifact): [optional] # noqa: E501 - recurse_mode (bool): If mode is set, apply the permission recursively into the artifact if it is a folder. [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3Artifact): [optional] # noqa: E501 - sub_path (str): SubPath allows an artifact to be sourced from a subpath within the specified source. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py deleted file mode 100644 index 7e8fede73e29..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository - from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository - from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact_repository import IoArgoprojWorkflowV1alpha1GCSArtifactRepository - from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository - from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository - from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact_repository import IoArgoprojWorkflowV1alpha1S3ArtifactRepository - globals()['IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository'] = IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository - globals()['IoArgoprojWorkflowV1alpha1AzureArtifactRepository'] = IoArgoprojWorkflowV1alpha1AzureArtifactRepository - globals()['IoArgoprojWorkflowV1alpha1GCSArtifactRepository'] = IoArgoprojWorkflowV1alpha1GCSArtifactRepository - globals()['IoArgoprojWorkflowV1alpha1HDFSArtifactRepository'] = IoArgoprojWorkflowV1alpha1HDFSArtifactRepository - 
globals()['IoArgoprojWorkflowV1alpha1OSSArtifactRepository'] = IoArgoprojWorkflowV1alpha1OSSArtifactRepository - globals()['IoArgoprojWorkflowV1alpha1S3ArtifactRepository'] = IoArgoprojWorkflowV1alpha1S3ArtifactRepository - - -class IoArgoprojWorkflowV1alpha1ArtifactRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'archive_logs': (bool,), # noqa: E501 - 'artifactory': (IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository,), # noqa: E501 - 'azure': (IoArgoprojWorkflowV1alpha1AzureArtifactRepository,), # noqa: E501 - 'gcs': (IoArgoprojWorkflowV1alpha1GCSArtifactRepository,), # noqa: E501 - 'hdfs': (IoArgoprojWorkflowV1alpha1HDFSArtifactRepository,), # noqa: E501 - 'oss': (IoArgoprojWorkflowV1alpha1OSSArtifactRepository,), # noqa: E501 - 's3': (IoArgoprojWorkflowV1alpha1S3ArtifactRepository,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'archive_logs': 'archiveLogs', # noqa: E501 - 'artifactory': 'artifactory', # noqa: E501 - 'azure': 'azure', # noqa: E501 - 'gcs': 'gcs', # noqa: E501 - 'hdfs': 'hdfs', # noqa: E501 - 'oss': 'oss', # noqa: E501 - 's3': 's3', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive_logs (bool): ArchiveLogs enables log archiving. [optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifactRepository): [optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifactRepository): [optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifactRepository): [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifactRepository): [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3ArtifactRepository): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - archive_logs (bool): ArchiveLogs enables log archiving. [optional] # noqa: E501 - artifactory (IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository): [optional] # noqa: E501 - azure (IoArgoprojWorkflowV1alpha1AzureArtifactRepository): [optional] # noqa: E501 - gcs (IoArgoprojWorkflowV1alpha1GCSArtifactRepository): [optional] # noqa: E501 - hdfs (IoArgoprojWorkflowV1alpha1HDFSArtifactRepository): [optional] # noqa: E501 - oss (IoArgoprojWorkflowV1alpha1OSSArtifactRepository): [optional] # noqa: E501 - s3 (IoArgoprojWorkflowV1alpha1S3ArtifactRepository): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py deleted file mode 100644 index 192bbc81b767..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'config_map': (str,), # noqa: E501 - 'key': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map': 'configMap', # noqa: E501 - 'key': 'key', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map (str): The name of the config map. Defaults to \"artifact-repositories\".. [optional] # noqa: E501 - key (str): The config map key. Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map (str): The name of the config map. Defaults to \"artifact-repositories\".. [optional] # noqa: E501 - key (str): The config map key. Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py deleted file mode 100644 index 32dc97e78f70..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py +++ /dev/null @@ -1,277 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactRepository - globals()['IoArgoprojWorkflowV1alpha1ArtifactRepository'] = IoArgoprojWorkflowV1alpha1ArtifactRepository - - -class IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'artifact_repository': (IoArgoprojWorkflowV1alpha1ArtifactRepository,), # noqa: E501 - 'config_map': (str,), # noqa: E501 - 'default': (bool,), # noqa: E501 - 'key': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifact_repository': 'artifactRepository', # noqa: E501 - 'config_map': 'configMap', # noqa: E501 - 'default': 'default', # noqa: E501 - 'key': 'key', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_repository (IoArgoprojWorkflowV1alpha1ArtifactRepository): [optional] # noqa: E501 - config_map (str): The name of the config map. Defaults to \"artifact-repositories\".. [optional] # noqa: E501 - default (bool): If this ref represents the default artifact repository, rather than a config map.. [optional] # noqa: E501 - key (str): The config map key. Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation.. [optional] # noqa: E501 - namespace (str): The namespace of the config map. Defaults to the workflow's namespace, or the controller's namespace (if found).. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_repository (IoArgoprojWorkflowV1alpha1ArtifactRepository): [optional] # noqa: E501 - config_map (str): The name of the config map. Defaults to \"artifact-repositories\".. [optional] # noqa: E501 - default (bool): If this ref represents the default artifact repository, rather than a config map.. [optional] # noqa: E501 - key (str): The config map key. Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation.. [optional] # noqa: E501 - namespace (str): The namespace of the config map. Defaults to the workflow's namespace, or the controller's namespace (if found).. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result.py deleted file mode 100644 index e7499b43b44b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1ArtifactResult(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'error': (str,), # noqa: E501 - 'success': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'error': 'error', # noqa: E501 - 'success': 'success', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactResult - a model defined in OpenAPI - - Args: - name (str): Name is the name of the Artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (str): Error is an optional error message which should be set if Success==false. [optional] # noqa: E501 - success (bool): Success describes whether the deletion succeeded. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactResult - a model defined in OpenAPI - - Args: - name (str): Name is the name of the Artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (str): Error is an optional error message which should be set if Success==false. 
[optional] # noqa: E501 - success (bool): Success describes whether the deletion succeeded. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py deleted file mode 100644 index 67c623b2be37..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult - globals()['IoArgoprojWorkflowV1alpha1ArtifactResult'] = IoArgoprojWorkflowV1alpha1ArtifactResult - - -class IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'artifact_results': ({str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifact_results': 'artifactResults', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_results ({str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)}): ArtifactResults maps Artifact name to result of the deletion. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_results ({str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)}): ArtifactResults maps Artifact name to result of the deletion. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact.py deleted file mode 100644 index 7d4ed2d35e9b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1ArtifactoryArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'url': (str,), # noqa: E501 - 'password_secret': (SecretKeySelector,), # noqa: E501 - 'username_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'url': 'url', # noqa: E501 - 'password_secret': 'passwordSecret', # noqa: E501 - 'username_secret': 'usernameSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - a model defined in OpenAPI - - Args: - url (str): URL of the artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password_secret (SecretKeySelector): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactoryArtifact - a model defined in OpenAPI - - Args: - url (str): URL of the artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password_secret (SecretKeySelector): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py deleted file mode 100644 index 9f5d82327748..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'key_format': (str,), # noqa: E501 - 'password_secret': (SecretKeySelector,), # noqa: E501 - 'repo_url': (str,), # noqa: E501 - 'username_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key_format': 'keyFormat', # noqa: E501 - 'password_secret': 'passwordSecret', # noqa: E501 - 'repo_url': 'repoURL', # noqa: E501 - 'username_secret': 'usernameSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. [optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - repo_url (str): RepoURL is the url for artifactory repo.. [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. 
[optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - repo_url (str): RepoURL is the url for artifactory repo.. [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact.py deleted file mode 100644 index 2e2eae2d0f4b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1AzureArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'blob': (str,), # noqa: E501 - 'container': (str,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'account_key_secret': (SecretKeySelector,), # noqa: E501 - 'use_sdk_creds': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'blob': 'blob', # noqa: E501 - 'container': 'container', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'account_key_secret': 'accountKeySecret', # noqa: E501 - 'use_sdk_creds': 'useSDKCreds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, blob, container, endpoint, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1AzureArtifact - a model defined in OpenAPI - - Args: - blob (str): Blob is the blob name (i.e., path) in the container where the artifact resides - container (str): Container is the container where resources will be stored - endpoint (str): Endpoint is the service url associated with an account. It is most likely \"https://.blob.core.windows.net\" - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - account_key_secret (SecretKeySelector): [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.blob = blob - self.container = container - self.endpoint = endpoint - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, blob, container, endpoint, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1AzureArtifact - a model defined in OpenAPI - - Args: - blob (str): Blob is the blob name (i.e., path) in the container where the artifact resides - container (str): Container is the container where resources will be stored - endpoint (str): Endpoint is the service url associated with an account. It is most likely \"https://.blob.core.windows.net\" - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - account_key_secret (SecretKeySelector): [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.blob = blob - self.container = container - self.endpoint = endpoint - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py deleted file mode 100644 index 14aff3f604a4..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py +++ /dev/null @@ -1,285 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1AzureArtifactRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'container': (str,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'account_key_secret': (SecretKeySelector,), # noqa: E501 - 'blob_name_format': (str,), # noqa: E501 - 'use_sdk_creds': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'container': 'container', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'account_key_secret': 'accountKeySecret', # noqa: E501 - 'blob_name_format': 'blobNameFormat', # noqa: E501 - 'use_sdk_creds': 'useSDKCreds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, container, endpoint, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1AzureArtifactRepository - a model defined in OpenAPI - - Args: - container (str): Container is the container where resources will be stored - endpoint (str): Endpoint is the service url associated with an account. It is most likely \"https://.blob.core.windows.net\" - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - account_key_secret (SecretKeySelector): [optional] # noqa: E501 - blob_name_format (str): BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables. [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.container = container - self.endpoint = endpoint - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, container, endpoint, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1AzureArtifactRepository - a model defined in OpenAPI - - Args: - container (str): Container is the container where resources will be stored - endpoint (str): Endpoint is the service url associated with an account. It is most likely \"https://.blob.core.windows.net\" - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - account_key_secret (SecretKeySelector): [optional] # noqa: E501 - blob_name_format (str): BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables. [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.container = container - self.endpoint = endpoint - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_backoff.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_backoff.py deleted file mode 100644 index 0b3b121164a0..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_backoff.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Backoff(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'duration': (str,), # noqa: E501 - 'factor': (str,), # noqa: E501 - 'max_duration': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'duration': 'duration', # noqa: E501 - 'factor': 'factor', # noqa: E501 - 'max_duration': 'maxDuration', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Backoff - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): Duration is the amount to back off. Default unit is seconds, but could also be a duration (e.g. \"2m\", \"1h\"). [optional] # noqa: E501 - factor (str): [optional] # noqa: E501 - max_duration (str): MaxDuration is the maximum amount of time allowed for a workflow in the backoff strategy. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Backoff - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): Duration is the amount to back off. Default unit is seconds, but could also be a duration (e.g. \"2m\", \"1h\"). [optional] # noqa: E501 - factor (str): [optional] # noqa: E501 - max_duration (str): MaxDuration is the maximum amount of time allowed for a workflow in the backoff strategy. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_basic_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_basic_auth.py deleted file mode 100644 index 224d5362979f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_basic_auth.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1BasicAuth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'password_secret': (SecretKeySelector,), # noqa: E501 - 'username_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'password_secret': 'passwordSecret', # noqa: E501 - 'username_secret': 'usernameSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1BasicAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password_secret (SecretKeySelector): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1BasicAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - password_secret (SecretKeySelector): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cache.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cache.py deleted file mode 100644 index e5fc792e1e73..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cache.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - - -class IoArgoprojWorkflowV1alpha1Cache(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'config_map': (ConfigMapKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map': 'configMap', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, config_map, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Cache - a model defined in OpenAPI - - Args: - config_map (ConfigMapKeySelector): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.config_map = config_map - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, config_map, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Cache - a model defined in OpenAPI - - Args: - config_map (ConfigMapKeySelector): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.config_map = config_map - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_client_cert_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_client_cert_auth.py deleted file mode 100644 index ad3aa2125090..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_client_cert_auth.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1ClientCertAuth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'client_cert_secret': (SecretKeySelector,), # noqa: E501 - 'client_key_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'client_cert_secret': 'clientCertSecret', # noqa: E501 - 'client_key_secret': 'clientKeySecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClientCertAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - client_cert_secret (SecretKeySelector): [optional] # noqa: E501 - client_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClientCertAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - client_cert_secret (SecretKeySelector): [optional] # noqa: E501 - client_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py deleted file mode 100644 index b9b2c127ad4d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojWorkflowV1alpha1WorkflowSpec'] = IoArgoprojWorkflowV1alpha1WorkflowSpec - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojWorkflowV1alpha1WorkflowSpec,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py deleted file mode 100644 index bec8ab170943..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate'] = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - - -class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'create_options': (CreateOptions,), # noqa: E501 - 'template': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_options': 'createOptions', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py deleted file mode 100644 index 5d9338f8ad30..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate'] = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - - -class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'create_options': (CreateOptions,), # noqa: E501 - 'template': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_options': 'createOptions', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py deleted file mode 100644 index c24373aa482d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate'] = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - globals()['ListMeta'] = ListMeta - - -class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'items': ([IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py deleted file mode 100644 index e4335b74a006..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - globals()['IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate'] = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate - - -class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'template': (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): DEPRECATED: This field is ignored.. [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): DEPRECATED: This field is ignored.. 
[optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_collect_event_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_collect_event_request.py deleted file mode 100644 index 2a438b7bb7e6..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_collect_event_request.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1CollectEventRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CollectEventRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CollectEventRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_column.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_column.py deleted file mode 100644 index 51b334370e39..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_column.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Column(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'name': 'name', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, name, type, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Column - a model defined in OpenAPI - - Args: - key (str): The key of the label or annotation, e.g., \"workflows.argoproj.io/completed\". - name (str): The name of this column, e.g., \"Workflow Completed\". - type (str): The type of this column, \"label\" or \"annotation\". - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.name = name - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, name, type, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Column - a model defined in OpenAPI - - Args: - key (str): The key of the label or annotation, e.g., \"workflows.argoproj.io/completed\". - name (str): The name of this column, e.g., \"Workflow Completed\". - type (str): The type of this column, \"label\" or \"annotation\". - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.name = name - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_condition.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_condition.py deleted file mode 100644 index 2402926c1a8a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_condition.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Condition(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'message': (str,), # noqa: E501 - 'status': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'message': 'message', # noqa: E501 - 'status': 'status', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Condition - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): Message is the condition message. [optional] # noqa: E501 - status (str): Status is the status of the condition. [optional] # noqa: E501 - type (str): Type is the type of condition. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Condition - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): Message is the condition message. [optional] # noqa: E501 - status (str): Status is the status of the condition. [optional] # noqa: E501 - type (str): Type is the type of condition. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py deleted file mode 100644 index ddca8f08b4fe..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_node.py +++ /dev/null @@ -1,371 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.container_port import ContainerPort - from argo_workflows.model.env_from_source import EnvFromSource - from argo_workflows.model.env_var import EnvVar - from argo_workflows.model.lifecycle import Lifecycle - from argo_workflows.model.probe import Probe - from argo_workflows.model.resource_requirements import ResourceRequirements - from argo_workflows.model.security_context import SecurityContext - from argo_workflows.model.volume_device import VolumeDevice - from argo_workflows.model.volume_mount import VolumeMount - globals()['ContainerPort'] = ContainerPort - globals()['EnvFromSource'] = EnvFromSource - globals()['EnvVar'] = EnvVar - globals()['Lifecycle'] = Lifecycle - globals()['Probe'] = Probe - globals()['ResourceRequirements'] = 
ResourceRequirements - globals()['SecurityContext'] = SecurityContext - globals()['VolumeDevice'] = VolumeDevice - globals()['VolumeMount'] = VolumeMount - - -class IoArgoprojWorkflowV1alpha1ContainerNode(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'args': ([str],), # noqa: E501 - 'command': ([str],), # noqa: E501 - 'dependencies': ([str],), # noqa: E501 - 'env': ([EnvVar],), # noqa: E501 - 'env_from': ([EnvFromSource],), # noqa: E501 - 'image': (str,), # noqa: E501 - 'image_pull_policy': (str,), # noqa: E501 - 'lifecycle': (Lifecycle,), # noqa: E501 - 'liveness_probe': (Probe,), # noqa: E501 - 'ports': ([ContainerPort],), # noqa: E501 - 'readiness_probe': (Probe,), # noqa: E501 - 'resources': (ResourceRequirements,), # noqa: E501 - 'security_context': (SecurityContext,), # noqa: E501 - 'startup_probe': (Probe,), # noqa: E501 - 'stdin': (bool,), # noqa: E501 - 'stdin_once': (bool,), # noqa: E501 - 'termination_message_path': (str,), # noqa: E501 - 'termination_message_policy': (str,), # noqa: E501 - 'tty': (bool,), # noqa: E501 - 'volume_devices': ([VolumeDevice],), # noqa: E501 - 'volume_mounts': ([VolumeMount],), # noqa: E501 - 'working_dir': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'args': 'args', # noqa: E501 - 'command': 'command', # noqa: E501 - 'dependencies': 'dependencies', # noqa: E501 - 'env': 'env', # noqa: E501 - 'env_from': 'envFrom', # noqa: E501 - 'image': 'image', # noqa: E501 - 'image_pull_policy': 'imagePullPolicy', # noqa: E501 - 'lifecycle': 'lifecycle', # noqa: E501 - 'liveness_probe': 'livenessProbe', # noqa: E501 - 'ports': 'ports', # noqa: E501 - 'readiness_probe': 'readinessProbe', # noqa: E501 - 'resources': 'resources', # noqa: E501 - 'security_context': 'securityContext', # noqa: E501 - 'startup_probe': 'startupProbe', # noqa: E501 - 'stdin': 'stdin', # noqa: E501 - 'stdin_once': 'stdinOnce', # noqa: E501 - 'termination_message_path': 'terminationMessagePath', # noqa: E501 - 'termination_message_policy': 'terminationMessagePolicy', # noqa: E501 - 'tty': 'tty', # noqa: E501 - 'volume_devices': 'volumeDevices', # noqa: 
E501 - 'volume_mounts': 'volumeMounts', # noqa: E501 - 'working_dir': 'workingDir', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContainerNode - a model defined in OpenAPI - - Args: - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. 
The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - dependencies ([str]): [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. 
[optional] # noqa: E501 - image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.. [optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. 
If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. [optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. [optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContainerNode - a model defined in OpenAPI - - Args: - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. 
Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - dependencies ([str]): [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. 
[optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.. [optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. [optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. 
The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. [optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py deleted file mode 100644 index 280de7d31032..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'retries': (str,), # noqa: E501 - 'duration': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'retries': 'retries', # noqa: E501 - 'duration': 'duration', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, retries, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy - a model defined in OpenAPI - - Args: - retries (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): Duration is the time between each retry, examples values are \"300ms\", \"1s\" or \"5m\". Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\", \"m\", \"h\".. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.retries = retries - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, retries, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy - a model defined in OpenAPI - - Args: - retries (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): Duration is the time between each retry, examples values are \"300ms\", \"1s\" or \"5m\". Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\", \"m\", \"h\".. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.retries = retries - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_template.py deleted file mode 100644 index 3f1315306671..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_container_set_template.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_node import IoArgoprojWorkflowV1alpha1ContainerNode - from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_set_retry_strategy import IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy - from argo_workflows.model.volume_mount import VolumeMount - globals()['IoArgoprojWorkflowV1alpha1ContainerNode'] = IoArgoprojWorkflowV1alpha1ContainerNode - globals()['IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy'] = IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy - globals()['VolumeMount'] = VolumeMount - - -class 
IoArgoprojWorkflowV1alpha1ContainerSetTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'containers': ([IoArgoprojWorkflowV1alpha1ContainerNode],), # noqa: E501 - 'retry_strategy': (IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy,), # noqa: E501 - 'volume_mounts': ([VolumeMount],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'containers': 'containers', # noqa: E501 - 'retry_strategy': 'retryStrategy', # noqa: E501 - 'volume_mounts': 'volumeMounts', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, containers, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContainerSetTemplate - a model defined in OpenAPI - - Args: - containers ([IoArgoprojWorkflowV1alpha1ContainerNode]): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - retry_strategy (IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy): [optional] # noqa: E501 - volume_mounts ([VolumeMount]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.containers = containers - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, containers, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContainerSetTemplate - a model defined in OpenAPI - - Args: - containers ([IoArgoprojWorkflowV1alpha1ContainerNode]): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - retry_strategy (IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy): [optional] # noqa: E501 - volume_mounts ([VolumeMount]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.containers = containers - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_continue_on.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_continue_on.py deleted file mode 100644 index dc63fff98dbe..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_continue_on.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1ContinueOn(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'error': (bool,), # noqa: E501 - 'failed': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'failed': 'failed', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContinueOn - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (bool): [optional] # noqa: E501 - failed (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ContinueOn - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (bool): [optional] # noqa: E501 - failed (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_counter.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_counter.py deleted file mode 100644 index a0af3fc91c6a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_counter.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Counter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Counter - a model defined in OpenAPI - - Args: - value (str): Value is the value of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Counter - a model defined in OpenAPI - - Args: - value (str): Value is the value of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py deleted file mode 100644 index f1823e76291f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow - globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojWorkflowV1alpha1CronWorkflow'] = IoArgoprojWorkflowV1alpha1CronWorkflow - - -class IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'create_options': (CreateOptions,), # noqa: E501 - 'cron_workflow': (IoArgoprojWorkflowV1alpha1CronWorkflow,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_options': 'createOptions', # noqa: E501 - 'cron_workflow': 'cronWorkflow', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - cron_workflow (IoArgoprojWorkflowV1alpha1CronWorkflow): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - cron_workflow (IoArgoprojWorkflowV1alpha1CronWorkflow): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py deleted file mode 100644 index b031245695a2..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1CreateS3BucketOptions(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'object_locking': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'object_locking': 'objectLocking', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CreateS3BucketOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object_locking (bool): ObjectLocking Enable object locking. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CreateS3BucketOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object_locking (bool): ObjectLocking Enable object locking. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow.py deleted file mode 100644 index 60a8c145ca38..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow.py +++ /dev/null @@ -1,289 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_spec import IoArgoprojWorkflowV1alpha1CronWorkflowSpec - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_status import IoArgoprojWorkflowV1alpha1CronWorkflowStatus - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojWorkflowV1alpha1CronWorkflowSpec'] = IoArgoprojWorkflowV1alpha1CronWorkflowSpec - globals()['IoArgoprojWorkflowV1alpha1CronWorkflowStatus'] = IoArgoprojWorkflowV1alpha1CronWorkflowStatus - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojWorkflowV1alpha1CronWorkflow(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojWorkflowV1alpha1CronWorkflowSpec,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - 'status': (IoArgoprojWorkflowV1alpha1CronWorkflowStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflow - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1CronWorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - status (IoArgoprojWorkflowV1alpha1CronWorkflowStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflow - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1CronWorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - status (IoArgoprojWorkflowV1alpha1CronWorkflowStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_list.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_list.py deleted file mode 100644 index a6c54eec531a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_list.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow - from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojWorkflowV1alpha1CronWorkflow'] = IoArgoprojWorkflowV1alpha1CronWorkflow - globals()['ListMeta'] = ListMeta - - -class IoArgoprojWorkflowV1alpha1CronWorkflowList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'items': ([IoArgoprojWorkflowV1alpha1CronWorkflow],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1CronWorkflow]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1CronWorkflow]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py deleted file mode 100644 index 7f300fa1938b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py deleted file mode 100644 index c46fe57be116..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py +++ /dev/null @@ -1,313 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_stop_strategy import IoArgoprojWorkflowV1alpha1StopStrategy - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojWorkflowV1alpha1StopStrategy'] = IoArgoprojWorkflowV1alpha1StopStrategy - globals()['IoArgoprojWorkflowV1alpha1WorkflowSpec'] = IoArgoprojWorkflowV1alpha1WorkflowSpec - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojWorkflowV1alpha1CronWorkflowSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'schedule': (str,), # noqa: E501 - 'workflow_spec': (IoArgoprojWorkflowV1alpha1WorkflowSpec,), # noqa: E501 - 'concurrency_policy': (str,), # noqa: E501 - 'failed_jobs_history_limit': (int,), # noqa: E501 - 'schedules': ([str],), # noqa: E501 - 'starting_deadline_seconds': (int,), # noqa: E501 - 'stop_strategy': (IoArgoprojWorkflowV1alpha1StopStrategy,), # noqa: E501 - 'successful_jobs_history_limit': (int,), # noqa: E501 - 'suspend': (bool,), # noqa: E501 - 'timezone': (str,), # noqa: E501 - 'workflow_metadata': (ObjectMeta,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'schedule': 'schedule', # noqa: E501 - 'workflow_spec': 'workflowSpec', # noqa: E501 - 'concurrency_policy': 'concurrencyPolicy', # noqa: E501 - 'failed_jobs_history_limit': 'failedJobsHistoryLimit', # noqa: E501 - 'schedules': 'schedules', # noqa: E501 - 'starting_deadline_seconds': 'startingDeadlineSeconds', # noqa: E501 - 'stop_strategy': 'stopStrategy', # noqa: E501 - 'successful_jobs_history_limit': 'successfulJobsHistoryLimit', # noqa: E501 - 'suspend': 'suspend', # noqa: E501 - 'timezone': 'timezone', # noqa: E501 - 'workflow_metadata': 'workflowMetadata', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, schedule, workflow_spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowSpec - a model defined in OpenAPI - - Args: - schedule (str): Schedule is a schedule to run the Workflow in Cron format - workflow_spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - concurrency_policy (str): ConcurrencyPolicy is the K8s-style concurrency policy that will be used. [optional] # noqa: E501 - failed_jobs_history_limit (int): FailedJobsHistoryLimit is the number of failed jobs to be kept at a time. [optional] # noqa: E501 - schedules ([str]): Schedules is a list of schedules to run the Workflow in Cron format. [optional] # noqa: E501 - starting_deadline_seconds (int): StartingDeadlineSeconds is the K8s-style deadline that will limit the time a CronWorkflow will be run after its original scheduled time if it is missed.. 
[optional] # noqa: E501 - stop_strategy (IoArgoprojWorkflowV1alpha1StopStrategy): [optional] # noqa: E501 - successful_jobs_history_limit (int): SuccessfulJobsHistoryLimit is the number of successful jobs to be kept at a time. [optional] # noqa: E501 - suspend (bool): Suspend is a flag that will stop new CronWorkflows from running if set to true. [optional] # noqa: E501 - timezone (str): Timezone is the timezone against which the cron schedule will be calculated, e.g. \"Asia/Tokyo\". Default is machine's local time.. [optional] # noqa: E501 - workflow_metadata (ObjectMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.schedule = schedule - self.workflow_spec = workflow_spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, schedule, workflow_spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowSpec - a model defined in OpenAPI - - Args: - schedule (str): Schedule is a schedule to run the Workflow in Cron format - workflow_spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - concurrency_policy (str): ConcurrencyPolicy is the K8s-style concurrency policy that will be used. [optional] # noqa: E501 - failed_jobs_history_limit (int): FailedJobsHistoryLimit is the number of failed jobs to be kept at a time. [optional] # noqa: E501 - schedules ([str]): Schedules is a list of schedules to run the Workflow in Cron format. [optional] # noqa: E501 - starting_deadline_seconds (int): StartingDeadlineSeconds is the K8s-style deadline that will limit the time a CronWorkflow will be run after its original scheduled time if it is missed.. [optional] # noqa: E501 - stop_strategy (IoArgoprojWorkflowV1alpha1StopStrategy): [optional] # noqa: E501 - successful_jobs_history_limit (int): SuccessfulJobsHistoryLimit is the number of successful jobs to be kept at a time. [optional] # noqa: E501 - suspend (bool): Suspend is a flag that will stop new CronWorkflows from running if set to true. [optional] # noqa: E501 - timezone (str): Timezone is the timezone against which the cron schedule will be calculated, e.g. \"Asia/Tokyo\". Default is machine's local time.. [optional] # noqa: E501 - workflow_metadata (ObjectMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.schedule = schedule - self.workflow_spec = workflow_spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_status.py deleted file mode 100644 index a0d6b293303d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_status.py +++ /dev/null @@ -1,299 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition - from argo_workflows.model.object_reference import ObjectReference - globals()['IoArgoprojWorkflowV1alpha1Condition'] = IoArgoprojWorkflowV1alpha1Condition - globals()['ObjectReference'] = ObjectReference - - -class IoArgoprojWorkflowV1alpha1CronWorkflowStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'active': ([ObjectReference],), # noqa: E501 - 'conditions': ([IoArgoprojWorkflowV1alpha1Condition],), # noqa: E501 - 'failed': (int,), # noqa: E501 - 'last_scheduled_time': (datetime,), # noqa: E501 - 'phase': (str,), # noqa: E501 - 'succeeded': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'active': 'active', # noqa: E501 - 'conditions': 'conditions', # noqa: E501 - 'failed': 'failed', # noqa: E501 - 'last_scheduled_time': 'lastScheduledTime', # noqa: E501 - 'phase': 'phase', # noqa: E501 - 'succeeded': 'succeeded', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, active, conditions, failed, last_scheduled_time, phase, succeeded, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowStatus - a model defined in OpenAPI - - Args: - active ([ObjectReference]): Active is a list of active workflows stemming from this CronWorkflow - conditions ([IoArgoprojWorkflowV1alpha1Condition]): Conditions is a list of conditions the CronWorkflow may have - failed (int): Failed is a counter of how many times a child workflow 
terminated in failed or errored state - last_scheduled_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. - phase (str): Phase defines the cron workflow phase. It is changed to Stopped when the stopping condition is achieved which stops new CronWorkflows from running - succeeded (int): Succeeded is a counter of how many times the child workflows had success - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.active = active - self.conditions = conditions - self.failed = failed - self.last_scheduled_time = last_scheduled_time - self.phase = phase - self.succeeded = succeeded - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, active, conditions, failed, last_scheduled_time, phase, succeeded, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowStatus - a model defined in OpenAPI - - Args: - active ([ObjectReference]): Active is a list of active workflows stemming from this CronWorkflow - conditions ([IoArgoprojWorkflowV1alpha1Condition]): Conditions is a list of conditions the CronWorkflow may have - failed (int): Failed is a counter of how many times a child workflow terminated in failed or errored state - last_scheduled_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. - phase (str): Phase defines the cron workflow phase. It is changed to Stopped when the stopping condition is achieved which stops new CronWorkflows from running - succeeded (int): Succeeded is a counter of how many times the child workflows had success - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.active = active - self.conditions = conditions - self.failed = failed - self.last_scheduled_time = last_scheduled_time - self.phase = phase - self.succeeded = succeeded - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py deleted file mode 100644 index 37deadfe3da8..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_task.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_task.py deleted file mode 100644 index 9ad55a16de98..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_task.py +++ /dev/null @@ -1,329 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments - from argo_workflows.model.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn - from argo_workflows.model.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook - from argo_workflows.model.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef - globals()['IoArgoprojWorkflowV1alpha1Arguments'] = IoArgoprojWorkflowV1alpha1Arguments - 
globals()['IoArgoprojWorkflowV1alpha1ContinueOn'] = IoArgoprojWorkflowV1alpha1ContinueOn - globals()['IoArgoprojWorkflowV1alpha1LifecycleHook'] = IoArgoprojWorkflowV1alpha1LifecycleHook - globals()['IoArgoprojWorkflowV1alpha1Sequence'] = IoArgoprojWorkflowV1alpha1Sequence - globals()['IoArgoprojWorkflowV1alpha1Template'] = IoArgoprojWorkflowV1alpha1Template - globals()['IoArgoprojWorkflowV1alpha1TemplateRef'] = IoArgoprojWorkflowV1alpha1TemplateRef - - -class IoArgoprojWorkflowV1alpha1DAGTask(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'arguments': (IoArgoprojWorkflowV1alpha1Arguments,), # noqa: E501 - 'continue_on': (IoArgoprojWorkflowV1alpha1ContinueOn,), # noqa: E501 - 'dependencies': ([str],), # noqa: E501 - 'depends': (str,), # noqa: E501 - 'hooks': ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)},), # noqa: E501 - 'inline': (IoArgoprojWorkflowV1alpha1Template,), # noqa: E501 - 'on_exit': (str,), # noqa: E501 - 'template': (str,), # noqa: E501 - 'template_ref': (IoArgoprojWorkflowV1alpha1TemplateRef,), # noqa: E501 - 'when': (str,), # noqa: E501 - 'with_items': ([dict],), # noqa: E501 - 'with_param': (str,), # noqa: E501 - 'with_sequence': (IoArgoprojWorkflowV1alpha1Sequence,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'arguments': 'arguments', # noqa: E501 - 'continue_on': 'continueOn', # noqa: E501 - 'dependencies': 'dependencies', # noqa: E501 - 'depends': 'depends', # noqa: E501 - 'hooks': 'hooks', # noqa: E501 - 'inline': 'inline', # noqa: E501 - 'on_exit': 'onExit', # noqa: E501 - 'template': 'template', # noqa: E501 - 'template_ref': 'templateRef', # noqa: E501 - 'when': 'when', # noqa: E501 - 'with_items': 'withItems', # noqa: E501 - 'with_param': 'withParam', # noqa: E501 - 'with_sequence': 
'withSequence', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1DAGTask - a model defined in OpenAPI - - Args: - name (str): Name is the name of the target - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - continue_on (IoArgoprojWorkflowV1alpha1ContinueOn): [optional] # noqa: E501 - dependencies ([str]): Dependencies are name of other targets which this depends on. 
[optional] # noqa: E501 - depends (str): Depends are name of other targets which this depends on. [optional] # noqa: E501 - hooks ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}): Hooks hold the lifecycle hook which is invoked at lifecycle of task, irrespective of the success, failure, or error status of the primary task. [optional] # noqa: E501 - inline (IoArgoprojWorkflowV1alpha1Template): [optional] # noqa: E501 - on_exit (str): OnExit is a template reference which is invoked at the end of the template, irrespective of the success, failure, or error of the primary template. DEPRECATED: Use Hooks[exit].Template instead.. [optional] # noqa: E501 - template (str): Name of template to execute. [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - when (str): When is an expression in which the task should conditionally execute. [optional] # noqa: E501 - with_items ([dict]): WithItems expands a task into multiple parallel tasks from the items in the list. [optional] # noqa: E501 - with_param (str): WithParam expands a task into multiple parallel tasks from the value in the parameter, which is expected to be a JSON list.. [optional] # noqa: E501 - with_sequence (IoArgoprojWorkflowV1alpha1Sequence): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1DAGTask - a model defined in OpenAPI - - Args: - name (str): Name is the name of the target - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - continue_on (IoArgoprojWorkflowV1alpha1ContinueOn): [optional] # noqa: E501 - dependencies ([str]): Dependencies are name of other targets which this depends on. [optional] # noqa: E501 - depends (str): Depends are name of other targets which this depends on. [optional] # noqa: E501 - hooks ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}): Hooks hold the lifecycle hook which is invoked at lifecycle of task, irrespective of the success, failure, or error status of the primary task. [optional] # noqa: E501 - inline (IoArgoprojWorkflowV1alpha1Template): [optional] # noqa: E501 - on_exit (str): OnExit is a template reference which is invoked at the end of the template, irrespective of the success, failure, or error of the primary template. DEPRECATED: Use Hooks[exit].Template instead.. [optional] # noqa: E501 - template (str): Name of template to execute. [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - when (str): When is an expression in which the task should conditionally execute. [optional] # noqa: E501 - with_items ([dict]): WithItems expands a task into multiple parallel tasks from the items in the list. 
[optional] # noqa: E501 - with_param (str): WithParam expands a task into multiple parallel tasks from the value in the parameter, which is expected to be a JSON list.. [optional] # noqa: E501 - with_sequence (IoArgoprojWorkflowV1alpha1Sequence): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_template.py deleted file mode 100644 index 8fcb8976aac7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_dag_template.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_dag_task import IoArgoprojWorkflowV1alpha1DAGTask - globals()['IoArgoprojWorkflowV1alpha1DAGTask'] = IoArgoprojWorkflowV1alpha1DAGTask - - -class IoArgoprojWorkflowV1alpha1DAGTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'tasks': ([IoArgoprojWorkflowV1alpha1DAGTask],), # noqa: E501 - 'fail_fast': (bool,), # noqa: E501 - 'target': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'tasks': 'tasks', # noqa: E501 - 'fail_fast': 'failFast', # noqa: E501 - 'target': 'target', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, tasks, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1DAGTemplate - a model defined in OpenAPI - - Args: - tasks ([IoArgoprojWorkflowV1alpha1DAGTask]): Tasks are a list of DAG tasks - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fail_fast (bool): This flag is for DAG logic. The DAG logic has a built-in \"fail fast\" feature to stop scheduling new steps, as soon as it detects that one of the DAG nodes is failed. Then it waits until all DAG nodes are completed before failing the DAG itself. The FailFast flag default is true, if set to false, it will allow a DAG to run all branches of the DAG to completion (either success or failure), regardless of the failed outcomes of branches in the DAG. More info and example about this feature at https://github.com/argoproj/argo-workflows/issues/1442. [optional] # noqa: E501 - target (str): Target are one or more names of targets to execute in a DAG. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.tasks = tasks - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, tasks, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1DAGTemplate - a model defined in OpenAPI - - Args: - tasks ([IoArgoprojWorkflowV1alpha1DAGTask]): Tasks are a list of DAG tasks - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fail_fast (bool): This flag is for DAG logic. The DAG logic has a built-in \"fail fast\" feature to stop scheduling new steps, as soon as it detects that one of the DAG nodes is failed. Then it waits until all DAG nodes are completed before failing the DAG itself. The FailFast flag default is true, if set to false, it will allow a DAG to run all branches of the DAG to completion (either success or failure), regardless of the failed outcomes of branches in the DAG. More info and example about this feature at https://github.com/argoproj/argo-workflows/issues/1442. [optional] # noqa: E501 - target (str): Target are one or more names of targets to execute in a DAG. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.tasks = tasks - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data.py deleted file mode 100644 index fdaa1f9a01d5..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_data_source import IoArgoprojWorkflowV1alpha1DataSource - from argo_workflows.model.io_argoproj_workflow_v1alpha1_transformation_step import IoArgoprojWorkflowV1alpha1TransformationStep - globals()['IoArgoprojWorkflowV1alpha1DataSource'] = IoArgoprojWorkflowV1alpha1DataSource - globals()['IoArgoprojWorkflowV1alpha1TransformationStep'] = IoArgoprojWorkflowV1alpha1TransformationStep - - -class IoArgoprojWorkflowV1alpha1Data(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'source': (IoArgoprojWorkflowV1alpha1DataSource,), # noqa: E501 - 'transformation': ([IoArgoprojWorkflowV1alpha1TransformationStep],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'source': 'source', # noqa: E501 - 'transformation': 'transformation', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, source, transformation, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Data - a model defined in OpenAPI - - Args: - source (IoArgoprojWorkflowV1alpha1DataSource): - transformation ([IoArgoprojWorkflowV1alpha1TransformationStep]): Transformation applies a set of transformations - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.source = source - self.transformation = transformation - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, source, transformation, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Data - a model defined in OpenAPI - - Args: - source (IoArgoprojWorkflowV1alpha1DataSource): - transformation ([IoArgoprojWorkflowV1alpha1TransformationStep]): Transformation applies a set of transformations - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.source = source - self.transformation = transformation - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data_source.py deleted file mode 100644 index 904e7e30e013..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_data_source.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_paths import IoArgoprojWorkflowV1alpha1ArtifactPaths - globals()['IoArgoprojWorkflowV1alpha1ArtifactPaths'] = IoArgoprojWorkflowV1alpha1ArtifactPaths - - -class IoArgoprojWorkflowV1alpha1DataSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'artifact_paths': (IoArgoprojWorkflowV1alpha1ArtifactPaths,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifact_paths': 'artifactPaths', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1DataSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_paths (IoArgoprojWorkflowV1alpha1ArtifactPaths): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1DataSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_paths (IoArgoprojWorkflowV1alpha1ArtifactPaths): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py deleted file mode 100644 index 2929c17aa709..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Event(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'selector': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'selector': 'selector', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, selector, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Event - a model defined in OpenAPI - - Args: - selector (str): Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.selector = selector - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, selector, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Event - a model defined in OpenAPI - - Args: - selector (str): Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.selector = selector - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_executor_config.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_executor_config.py deleted file mode 100644 index 7ab0a9a0c578..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_executor_config.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1ExecutorConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'service_account_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'service_account_name': 'serviceAccountName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ExecutorConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - service_account_name (str): ServiceAccountName specifies the service account name of the executor container.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ExecutorConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - service_account_name (str): ServiceAccountName specifies the service account name of the executor container.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gauge.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gauge.py deleted file mode 100644 index b1c122de5d17..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gauge.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Gauge(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'realtime': (bool,), # noqa: E501 - 'value': (str,), # noqa: E501 - 'operation': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'realtime': 'realtime', # noqa: E501 - 'value': 'value', # noqa: E501 - 'operation': 'operation', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, realtime, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Gauge - a model defined in OpenAPI - - Args: - realtime (bool): Realtime emits this metric in real time if applicable - value (str): Value is the value to be used in the operation with the metric's current value. If no operation is set, value is the value of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - operation (str): Operation defines the operation to apply with value and the metrics' current value. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.realtime = realtime - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, realtime, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Gauge - a model defined in OpenAPI - - Args: - realtime (bool): Realtime emits this metric in real time if applicable - value (str): Value is the value to be used in the operation with the metric's current value. If no operation is set, value is the value of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - operation (str): Operation defines the operation to apply with value and the metrics' current value. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.realtime = realtime - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact.py deleted file mode 100644 index 59f6eff9fda1..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1GCSArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'key': (str,), # noqa: E501 - 'bucket': (str,), # noqa: E501 - 'service_account_key_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'bucket': 'bucket', # noqa: E501 - 'service_account_key_secret': 'serviceAccountKeySecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GCSArtifact - a model defined in OpenAPI - - Args: - key (str): Key is the path in the bucket where the artifact resides - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - service_account_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GCSArtifact - a model defined in OpenAPI - - Args: - key (str): Key is the path in the bucket where the artifact resides - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bucket (str): Bucket is the name of the bucket. 
[optional] # noqa: E501 - service_account_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py deleted file mode 100644 index 0f3e5e9d0753..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1GCSArtifactRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'bucket': (str,), # noqa: E501 - 'key_format': (str,), # noqa: E501 - 'service_account_key_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'bucket': 'bucket', # noqa: E501 - 'key_format': 'keyFormat', # noqa: E501 - 'service_account_key_secret': 'serviceAccountKeySecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GCSArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. [optional] # noqa: E501 - service_account_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GCSArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. [optional] # noqa: E501 - service_account_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py deleted file mode 100644 index 9d6cb8042bde..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_get_user_info_response.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1GetUserInfoResponse(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'email': (str,), # noqa: E501 - 'email_verified': (bool,), # noqa: E501 - 'groups': ([str],), # noqa: E501 - 'issuer': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'service_account_name': (str,), # noqa: E501 - 'service_account_namespace': (str,), # noqa: E501 - 'subject': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'email': 'email', # noqa: E501 - 'email_verified': 'emailVerified', # noqa: E501 - 'groups': 'groups', # noqa: E501 - 'issuer': 'issuer', # noqa: E501 - 'name': 'name', # noqa: E501 - 'service_account_name': 'serviceAccountName', # noqa: E501 - 'service_account_namespace': 'serviceAccountNamespace', # noqa: E501 - 'subject': 'subject', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GetUserInfoResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - email (str): [optional] # noqa: E501 - email_verified (bool): [optional] # noqa: E501 - groups ([str]): [optional] # noqa: E501 - issuer (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - service_account_name (str): [optional] # noqa: E501 - service_account_namespace (str): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GetUserInfoResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - email (str): [optional] # noqa: E501 - email_verified (bool): [optional] # noqa: E501 - groups ([str]): [optional] # noqa: E501 - issuer (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - service_account_name (str): [optional] # noqa: E501 - service_account_namespace (str): [optional] # noqa: E501 - subject (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py deleted file mode 100644 index 41b4b8287f67..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_git_artifact.py +++ /dev/null @@ -1,307 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1GitArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'repo': (str,), # noqa: E501 - 'branch': (str,), # noqa: E501 - 'depth': (int,), # noqa: E501 - 'disable_submodules': (bool,), # noqa: E501 - 'fetch': ([str],), # noqa: E501 - 'insecure_ignore_host_key': (bool,), # noqa: E501 - 'password_secret': (SecretKeySelector,), # noqa: E501 - 'revision': (str,), # noqa: E501 - 'single_branch': (bool,), # noqa: E501 - 'ssh_private_key_secret': (SecretKeySelector,), # noqa: E501 - 'username_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'repo': 'repo', # noqa: E501 - 'branch': 'branch', # noqa: E501 - 'depth': 'depth', # noqa: E501 - 'disable_submodules': 'disableSubmodules', # noqa: E501 - 'fetch': 'fetch', # noqa: E501 - 'insecure_ignore_host_key': 'insecureIgnoreHostKey', # noqa: E501 - 'password_secret': 'passwordSecret', # noqa: E501 - 'revision': 'revision', # noqa: E501 - 'single_branch': 'singleBranch', # noqa: E501 - 'ssh_private_key_secret': 'sshPrivateKeySecret', # noqa: E501 - 'username_secret': 'usernameSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, repo, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GitArtifact - a model defined in OpenAPI - - Args: - repo (str): Repo is the git repository - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - branch (str): Branch is the branch to fetch when `SingleBranch` is enabled. [optional] # noqa: E501 - depth (int): Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip. [optional] # noqa: E501 - disable_submodules (bool): DisableSubmodules disables submodules during git clone. [optional] # noqa: E501 - fetch ([str]): Fetch specifies a number of refs that should be fetched before checkout. [optional] # noqa: E501 - insecure_ignore_host_key (bool): InsecureIgnoreHostKey disables SSH strict host key checking during git clone. [optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - revision (str): Revision is the git commit, tag, branch to checkout. [optional] # noqa: E501 - single_branch (bool): SingleBranch enables single branch clone, using the `branch` parameter. 
[optional] # noqa: E501 - ssh_private_key_secret (SecretKeySelector): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.repo = repo - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, repo, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1GitArtifact - a model defined in OpenAPI - - Args: - repo (str): Repo is the git repository - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - branch (str): Branch is the branch to fetch when `SingleBranch` is enabled. [optional] # noqa: E501 - depth (int): Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip. [optional] # noqa: E501 - disable_submodules (bool): DisableSubmodules disables submodules during git clone. [optional] # noqa: E501 - fetch ([str]): Fetch specifies a number of refs that should be fetched before checkout. [optional] # noqa: E501 - insecure_ignore_host_key (bool): InsecureIgnoreHostKey disables SSH strict host key checking during git clone. [optional] # noqa: E501 - password_secret (SecretKeySelector): [optional] # noqa: E501 - revision (str): Revision is the git commit, tag, branch to checkout. 
[optional] # noqa: E501 - single_branch (bool): SingleBranch enables single branch clone, using the `branch` parameter. [optional] # noqa: E501 - ssh_private_key_secret (SecretKeySelector): [optional] # noqa: E501 - username_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.repo = repo - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact.py deleted file mode 100644 index 0554d3cff9bd..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact.py +++ /dev/null @@ -1,305 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1HDFSArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'path': (str,), # noqa: E501 - 'addresses': ([str],), # noqa: E501 - 'force': (bool,), # noqa: E501 - 'hdfs_user': (str,), # noqa: E501 - 'krb_c_cache_secret': (SecretKeySelector,), # noqa: E501 - 'krb_config_config_map': (ConfigMapKeySelector,), # noqa: E501 - 'krb_keytab_secret': (SecretKeySelector,), # noqa: E501 - 'krb_realm': (str,), # noqa: E501 - 'krb_service_principal_name': (str,), # noqa: E501 - 'krb_username': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'path': 'path', # noqa: E501 - 'addresses': 'addresses', # noqa: E501 - 'force': 'force', # noqa: E501 - 'hdfs_user': 'hdfsUser', # noqa: E501 - 'krb_c_cache_secret': 'krbCCacheSecret', # noqa: E501 - 'krb_config_config_map': 'krbConfigConfigMap', # noqa: E501 - 'krb_keytab_secret': 'krbKeytabSecret', # noqa: E501 - 'krb_realm': 'krbRealm', # noqa: E501 - 'krb_service_principal_name': 'krbServicePrincipalName', # noqa: E501 - 'krb_username': 'krbUsername', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, path, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HDFSArtifact - a model defined in OpenAPI - - Args: - path (str): Path is a file path in HDFS - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - addresses ([str]): Addresses is accessible addresses of HDFS name nodes. [optional] # noqa: E501 - force (bool): Force copies a file forcibly even if it exists. [optional] # noqa: E501 - hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. [optional] # noqa: E501 - krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 - krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 - krb_keytab_secret (SecretKeySelector): [optional] # noqa: E501 - krb_realm (str): KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - krb_service_principal_name (str): KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.. [optional] # noqa: E501 - krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, path, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HDFSArtifact - a model defined in OpenAPI - - Args: - path (str): Path is a file path in HDFS - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - addresses ([str]): Addresses is accessible addresses of HDFS name nodes. [optional] # noqa: E501 - force (bool): Force copies a file forcibly even if it exists. [optional] # noqa: E501 - hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. [optional] # noqa: E501 - krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 - krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 - krb_keytab_secret (SecretKeySelector): [optional] # noqa: E501 - krb_realm (str): KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.. 
[optional] # noqa: E501 - krb_service_principal_name (str): KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.. [optional] # noqa: E501 - krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py deleted file mode 100644 index 2a8f16b957ab..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py +++ /dev/null @@ -1,299 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1HDFSArtifactRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'addresses': ([str],), # noqa: E501 - 'force': (bool,), # noqa: E501 - 'hdfs_user': (str,), # noqa: E501 - 'krb_c_cache_secret': (SecretKeySelector,), # noqa: E501 - 'krb_config_config_map': (ConfigMapKeySelector,), # noqa: E501 - 'krb_keytab_secret': (SecretKeySelector,), # noqa: E501 - 'krb_realm': (str,), # noqa: E501 - 'krb_service_principal_name': (str,), # noqa: E501 - 'krb_username': (str,), # noqa: E501 - 'path_format': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'addresses': 'addresses', # noqa: E501 - 'force': 'force', # noqa: E501 - 'hdfs_user': 'hdfsUser', # noqa: E501 - 'krb_c_cache_secret': 'krbCCacheSecret', # noqa: E501 - 'krb_config_config_map': 'krbConfigConfigMap', # noqa: E501 - 'krb_keytab_secret': 'krbKeytabSecret', # noqa: E501 - 'krb_realm': 'krbRealm', # noqa: E501 - 'krb_service_principal_name': 'krbServicePrincipalName', # noqa: E501 - 'krb_username': 'krbUsername', # noqa: E501 - 'path_format': 'pathFormat', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HDFSArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - addresses ([str]): Addresses is accessible addresses of HDFS name nodes. [optional] # noqa: E501 - force (bool): Force copies a file forcibly even if it exists. [optional] # noqa: E501 - hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. [optional] # noqa: E501 - krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 - krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 - krb_keytab_secret (SecretKeySelector): [optional] # noqa: E501 - krb_realm (str): KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - krb_service_principal_name (str): KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.. [optional] # noqa: E501 - krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - path_format (str): PathFormat is defines the format of path to store a file. Can reference workflow variables. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HDFSArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - addresses ([str]): Addresses is accessible addresses of HDFS name nodes. [optional] # noqa: E501 - force (bool): Force copies a file forcibly even if it exists. [optional] # noqa: E501 - hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. [optional] # noqa: E501 - krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 - krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 - krb_keytab_secret (SecretKeySelector): [optional] # noqa: E501 - krb_realm (str): KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.. 
[optional] # noqa: E501 - krb_service_principal_name (str): KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.. [optional] # noqa: E501 - krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 - path_format (str): PathFormat is defines the format of path to store a file. Can reference workflow variables. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_header.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_header.py deleted file mode 100644 index 9c60d08cb160..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_header.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Header(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Header - a model defined in OpenAPI - - Args: - name (str): Name is the header name - value (str): Value is the literal value to use for the header - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Header - a model defined in OpenAPI - - Args: - name (str): Name is the header name - value (str): Value is the literal value to use for the header - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_histogram.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_histogram.py deleted file mode 100644 index d5bfd37f942f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_histogram.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Histogram(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'buckets': ([float],), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'buckets': 'buckets', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, buckets, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Histogram - a model defined in OpenAPI - - Args: - buckets ([float]): Buckets is a list of bucket divisors for the histogram - value (str): Value is the value of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.buckets = buckets - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, buckets, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Histogram - a model defined in OpenAPI - - Args: - buckets ([float]): Buckets is a list of bucket divisors for the histogram - value (str): Value is the value of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.buckets = buckets - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py deleted file mode 100644 index 323d1b17cf2a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http.py +++ /dev/null @@ -1,297 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource - from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_header import IoArgoprojWorkflowV1alpha1HTTPHeader - globals()['IoArgoprojWorkflowV1alpha1HTTPBodySource'] = IoArgoprojWorkflowV1alpha1HTTPBodySource - globals()['IoArgoprojWorkflowV1alpha1HTTPHeader'] = IoArgoprojWorkflowV1alpha1HTTPHeader - - -class IoArgoprojWorkflowV1alpha1HTTP(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'url': (str,), # noqa: E501 - 'body': (str,), # noqa: E501 - 'body_from': (IoArgoprojWorkflowV1alpha1HTTPBodySource,), # noqa: E501 - 'headers': ([IoArgoprojWorkflowV1alpha1HTTPHeader],), # noqa: E501 - 'insecure_skip_verify': (bool,), # noqa: E501 - 'method': (str,), # noqa: E501 - 'success_condition': (str,), # noqa: E501 - 'timeout_seconds': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'url': 'url', # noqa: E501 - 'body': 'body', # noqa: E501 - 'body_from': 'bodyFrom', # noqa: E501 - 'headers': 'headers', # noqa: E501 - 'insecure_skip_verify': 'insecureSkipVerify', # noqa: E501 - 'method': 'method', # noqa: E501 - 'success_condition': 'successCondition', # noqa: E501 - 'timeout_seconds': 'timeoutSeconds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTP - a model defined in OpenAPI - - Args: - url (str): URL of the HTTP Request - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - body (str): Body is content of the HTTP Request. [optional] # noqa: E501 - body_from (IoArgoprojWorkflowV1alpha1HTTPBodySource): [optional] # noqa: E501 - headers ([IoArgoprojWorkflowV1alpha1HTTPHeader]): Headers are an optional list of headers to send with HTTP requests. [optional] # noqa: E501 - insecure_skip_verify (bool): InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client. [optional] # noqa: E501 - method (str): Method is HTTP methods for HTTP Request. [optional] # noqa: E501 - success_condition (str): SuccessCondition is an expression if evaluated to true is considered successful. [optional] # noqa: E501 - timeout_seconds (int): TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTP - a model defined in OpenAPI - - Args: - url (str): URL of the HTTP Request - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - body (str): Body is content of the HTTP Request. [optional] # noqa: E501 - body_from (IoArgoprojWorkflowV1alpha1HTTPBodySource): [optional] # noqa: E501 - headers ([IoArgoprojWorkflowV1alpha1HTTPHeader]): Headers are an optional list of headers to send with HTTP requests. [optional] # noqa: E501 - insecure_skip_verify (bool): InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client. [optional] # noqa: E501 - method (str): Method is HTTP methods for HTTP Request. [optional] # noqa: E501 - success_condition (str): SuccessCondition is an expression if evaluated to true is considered successful. [optional] # noqa: E501 - timeout_seconds (int): TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py deleted file mode 100644 index ed4214d73b59..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_artifact.py +++ /dev/null @@ -1,277 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header - from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth - globals()['IoArgoprojWorkflowV1alpha1HTTPAuth'] = IoArgoprojWorkflowV1alpha1HTTPAuth - globals()['IoArgoprojWorkflowV1alpha1Header'] = IoArgoprojWorkflowV1alpha1Header - - -class IoArgoprojWorkflowV1alpha1HTTPArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'url': (str,), # noqa: E501 - 'auth': (IoArgoprojWorkflowV1alpha1HTTPAuth,), # noqa: E501 - 'headers': ([IoArgoprojWorkflowV1alpha1Header],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'url': 'url', # noqa: E501 - 'auth': 'auth', # noqa: E501 - 'headers': 'headers', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPArtifact - a model defined in OpenAPI - - Args: - url (str): URL of the artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojWorkflowV1alpha1HTTPAuth): [optional] # noqa: E501 - headers ([IoArgoprojWorkflowV1alpha1Header]): Headers are an optional list of headers to send with HTTP requests for artifacts. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPArtifact - a model defined in OpenAPI - - Args: - url (str): URL of the artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - auth (IoArgoprojWorkflowV1alpha1HTTPAuth): [optional] # noqa: E501 - headers ([IoArgoprojWorkflowV1alpha1Header]): Headers are an optional list of headers to send with HTTP requests for artifacts. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_auth.py deleted file mode 100644 index 8832c8742b4c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_auth.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth - from argo_workflows.model.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth - from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth - globals()['IoArgoprojWorkflowV1alpha1BasicAuth'] = IoArgoprojWorkflowV1alpha1BasicAuth - globals()['IoArgoprojWorkflowV1alpha1ClientCertAuth'] = IoArgoprojWorkflowV1alpha1ClientCertAuth - globals()['IoArgoprojWorkflowV1alpha1OAuth2Auth'] = IoArgoprojWorkflowV1alpha1OAuth2Auth - - 
-class IoArgoprojWorkflowV1alpha1HTTPAuth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'basic_auth': (IoArgoprojWorkflowV1alpha1BasicAuth,), # noqa: E501 - 'client_cert': (IoArgoprojWorkflowV1alpha1ClientCertAuth,), # noqa: E501 - 'oauth2': (IoArgoprojWorkflowV1alpha1OAuth2Auth,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'basic_auth': 'basicAuth', # noqa: E501 - 'client_cert': 'clientCert', # noqa: E501 - 'oauth2': 'oauth2', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic_auth (IoArgoprojWorkflowV1alpha1BasicAuth): [optional] # noqa: E501 - client_cert (IoArgoprojWorkflowV1alpha1ClientCertAuth): [optional] # noqa: E501 - oauth2 (IoArgoprojWorkflowV1alpha1OAuth2Auth): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPAuth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - basic_auth (IoArgoprojWorkflowV1alpha1BasicAuth): [optional] # noqa: E501 - client_cert (IoArgoprojWorkflowV1alpha1ClientCertAuth): [optional] # noqa: E501 - oauth2 (IoArgoprojWorkflowV1alpha1OAuth2Auth): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_body_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_body_source.py deleted file mode 100644 index 4f343457d486..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_body_source.py +++ /dev/null @@ -1,260 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1HTTPBodySource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - ('bytes',): { - 'regex': { - 'pattern': r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', # noqa: E501 - }, - }, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'bytes': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'bytes': 'bytes', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPBodySource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bytes (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPBodySource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - bytes (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header.py deleted file mode 100644 index 46d25f86b85f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_header_source import IoArgoprojWorkflowV1alpha1HTTPHeaderSource - globals()['IoArgoprojWorkflowV1alpha1HTTPHeaderSource'] = IoArgoprojWorkflowV1alpha1HTTPHeaderSource - - -class IoArgoprojWorkflowV1alpha1HTTPHeader(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - 'value_from': (IoArgoprojWorkflowV1alpha1HTTPHeaderSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value': 'value', # noqa: E501 - 'value_from': 'valueFrom', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPHeader - a model defined in OpenAPI - - Args: - name (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - value_from (IoArgoprojWorkflowV1alpha1HTTPHeaderSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPHeader - a model defined in OpenAPI - - Args: - name (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): [optional] # noqa: E501 - value_from (IoArgoprojWorkflowV1alpha1HTTPHeaderSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header_source.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header_source.py deleted file mode 100644 index 38726ccc4e1b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_http_header_source.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1HTTPHeaderSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'secret_key_ref': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'secret_key_ref': 'secretKeyRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPHeaderSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1HTTPHeaderSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - secret_key_ref (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_info_response.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_info_response.py deleted file mode 100644 index 73b42932b240..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_info_response.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_column import IoArgoprojWorkflowV1alpha1Column - from argo_workflows.model.io_argoproj_workflow_v1alpha1_link import IoArgoprojWorkflowV1alpha1Link - globals()['IoArgoprojWorkflowV1alpha1Column'] = IoArgoprojWorkflowV1alpha1Column - globals()['IoArgoprojWorkflowV1alpha1Link'] = IoArgoprojWorkflowV1alpha1Link - - -class IoArgoprojWorkflowV1alpha1InfoResponse(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'columns': ([IoArgoprojWorkflowV1alpha1Column],), # noqa: E501 - 'links': ([IoArgoprojWorkflowV1alpha1Link],), # noqa: E501 - 'managed_namespace': (str,), # noqa: E501 - 'modals': ({str: (bool,)},), # noqa: E501 - 'nav_color': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'columns': 'columns', # noqa: E501 - 'links': 'links', # noqa: E501 - 'managed_namespace': 'managedNamespace', # noqa: E501 - 'modals': 'modals', # noqa: E501 - 'nav_color': 'navColor', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1InfoResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - columns ([IoArgoprojWorkflowV1alpha1Column]): [optional] # noqa: E501 - links ([IoArgoprojWorkflowV1alpha1Link]): [optional] # noqa: E501 - managed_namespace (str): [optional] # noqa: E501 - modals ({str: (bool,)}): [optional] # noqa: E501 - nav_color (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1InfoResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - columns ([IoArgoprojWorkflowV1alpha1Column]): [optional] # noqa: E501 - links ([IoArgoprojWorkflowV1alpha1Link]): [optional] # noqa: E501 - managed_namespace (str): [optional] # noqa: E501 - modals ({str: (bool,)}): [optional] # noqa: E501 - nav_color (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_inputs.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_inputs.py deleted file mode 100644 index 27a2c1d335d8..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_inputs.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter - globals()['IoArgoprojWorkflowV1alpha1Artifact'] = IoArgoprojWorkflowV1alpha1Artifact - globals()['IoArgoprojWorkflowV1alpha1Parameter'] = IoArgoprojWorkflowV1alpha1Parameter - - -class IoArgoprojWorkflowV1alpha1Inputs(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'artifacts': ([IoArgoprojWorkflowV1alpha1Artifact],), # noqa: E501 - 'parameters': ([IoArgoprojWorkflowV1alpha1Parameter],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifacts': 'artifacts', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Inputs - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts ([IoArgoprojWorkflowV1alpha1Artifact]): Artifact are a list of artifacts passed as inputs. [optional] # noqa: E501 - parameters ([IoArgoprojWorkflowV1alpha1Parameter]): Parameters are a list of parameters passed as inputs. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Inputs - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts ([IoArgoprojWorkflowV1alpha1Artifact]): Artifact are a list of artifacts passed as inputs. 
[optional] # noqa: E501 - parameters ([IoArgoprojWorkflowV1alpha1Parameter]): Parameters are a list of parameters passed as inputs. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_keys.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_keys.py deleted file mode 100644 index 618bb73c11ce..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_keys.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1LabelKeys(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'items': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LabelKeys - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LabelKeys - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_value_from.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_value_from.py deleted file mode 100644 index cf586092dd9e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_value_from.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1LabelValueFrom(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'expression': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'expression': 'expression', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, expression, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LabelValueFrom - a model defined in OpenAPI - - Args: - expression (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.expression = expression - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, expression, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LabelValueFrom - a model defined in OpenAPI - - Args: - expression (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.expression = expression - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_values.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_values.py deleted file mode 100644 index 7a36cb778eba..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_label_values.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1LabelValues(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'items': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LabelValues - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LabelValues - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py deleted file mode 100644 index a28912825b4e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lifecycle_hook.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef - globals()['IoArgoprojWorkflowV1alpha1Arguments'] = IoArgoprojWorkflowV1alpha1Arguments - globals()['IoArgoprojWorkflowV1alpha1TemplateRef'] = IoArgoprojWorkflowV1alpha1TemplateRef - - -class IoArgoprojWorkflowV1alpha1LifecycleHook(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'arguments': (IoArgoprojWorkflowV1alpha1Arguments,), # noqa: E501 - 'expression': (str,), # noqa: E501 - 'template': (str,), # noqa: E501 - 'template_ref': (IoArgoprojWorkflowV1alpha1TemplateRef,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'arguments': 'arguments', # noqa: E501 - 'expression': 'expression', # noqa: E501 - 'template': 'template', # noqa: E501 - 'template_ref': 'templateRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LifecycleHook - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - expression (str): Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored. [optional] # noqa: E501 - template (str): Template is the name of the template to execute by the hook. [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LifecycleHook - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - expression (str): Expression is a condition expression for when a node will be retried. 
If it evaluates to false, the node will not be retried and the retry strategy will be ignored. [optional] # noqa: E501 - template (str): Template is the name of the template to execute by the hook. [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_link.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_link.py deleted file mode 100644 index 600bdfccabd2..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_link.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Link(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'scope': (str,), # noqa: E501 - 'url': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'scope': 'scope', # noqa: E501 - 'url': 'url', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, scope, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Link - a model defined in OpenAPI - - Args: - name (str): The name of the link, E.g. \"Workflow Logs\" or \"Pod Logs\" - scope (str): \"workflow\", \"pod\", \"pod-logs\", \"event-source-logs\", \"sensor-logs\", \"workflow-list\" or \"chat\" - url (str): The URL. Can contain \"${metadata.namespace}\", \"${metadata.name}\", \"${status.startedAt}\", \"${status.finishedAt}\" or any other element in workflow yaml, e.g. 
\"${io.argoproj.workflow.v1alpha1.metadata.annotations.userDefinedKey}\" - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.scope = scope - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, scope, url, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Link - a model defined in OpenAPI - - Args: - name (str): The name of the link, E.g. \"Workflow Logs\" or \"Pod Logs\" - scope (str): \"workflow\", \"pod\", \"pod-logs\", \"event-source-logs\", \"sensor-logs\", \"workflow-list\" or \"chat\" - url (str): The URL. Can contain \"${metadata.namespace}\", \"${metadata.name}\", \"${status.startedAt}\", \"${status.finishedAt}\" or any other element in workflow yaml, e.g. \"${io.argoproj.workflow.v1alpha1.metadata.annotations.userDefinedKey}\" - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.scope = scope - self.url = url - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py deleted file mode 100644 index ab27b6454733..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow - globals()['IoArgoprojWorkflowV1alpha1CronWorkflow'] = IoArgoprojWorkflowV1alpha1CronWorkflow - - -class IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'cron_workflow': (IoArgoprojWorkflowV1alpha1CronWorkflow,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cron_workflow': 'cronWorkflow', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron_workflow (IoArgoprojWorkflowV1alpha1CronWorkflow): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron_workflow (IoArgoprojWorkflowV1alpha1CronWorkflow): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_log_entry.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_log_entry.py deleted file mode 100644 index 914013fc4227..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_log_entry.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1LogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'content': (str,), # noqa: E501 - 'pod_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'content': 'content', # noqa: E501 - 'pod_name': 'podName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - content (str): [optional] # noqa: E501 - pod_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1LogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - content (str): [optional] # noqa: E501 - pod_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_manifest_from.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_manifest_from.py deleted file mode 100644 index 97ac065c989c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_manifest_from.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact - globals()['IoArgoprojWorkflowV1alpha1Artifact'] = IoArgoprojWorkflowV1alpha1Artifact - - -class IoArgoprojWorkflowV1alpha1ManifestFrom(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'artifact': (IoArgoprojWorkflowV1alpha1Artifact,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifact': 'artifact', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, artifact, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ManifestFrom - a model defined in OpenAPI - - Args: - artifact (IoArgoprojWorkflowV1alpha1Artifact): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.artifact = artifact - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, artifact, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ManifestFrom - a model defined in OpenAPI - - Args: - artifact (IoArgoprojWorkflowV1alpha1Artifact): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.artifact = artifact - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoization_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoization_status.py deleted file mode 100644 index ba0a053daf40..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoization_status.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1MemoizationStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'cache_name': (str,), # noqa: E501 - 'hit': (bool,), # noqa: E501 - 'key': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cache_name': 'cacheName', # noqa: E501 - 'hit': 'hit', # noqa: E501 - 'key': 'key', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, cache_name, hit, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MemoizationStatus - a model defined in OpenAPI - - Args: - cache_name (str): Cache is the name of the cache that was used - hit (bool): Hit indicates whether this node was created from a cache entry - key (str): Key is the name of the key used for this node's cache - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.cache_name = cache_name - self.hit = hit - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, cache_name, hit, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MemoizationStatus - a model defined in OpenAPI - - Args: - cache_name (str): Cache is the name of the cache that was used - hit (bool): Hit indicates whether this node was created from a cache entry - key (str): Key is the name of the key used for this node's cache - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.cache_name = cache_name - self.hit = hit - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoize.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoize.py deleted file mode 100644 index 68ee63676cf3..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_memoize.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cache import IoArgoprojWorkflowV1alpha1Cache - globals()['IoArgoprojWorkflowV1alpha1Cache'] = IoArgoprojWorkflowV1alpha1Cache - - -class IoArgoprojWorkflowV1alpha1Memoize(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'cache': (IoArgoprojWorkflowV1alpha1Cache,), # noqa: E501 - 'key': (str,), # noqa: E501 - 'max_age': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cache': 'cache', # noqa: E501 - 'key': 'key', # noqa: E501 - 'max_age': 'maxAge', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, cache, key, max_age, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Memoize - a model defined in OpenAPI - - Args: - cache (IoArgoprojWorkflowV1alpha1Cache): - key (str): Key is the key to use as the caching key - max_age (str): MaxAge is the maximum age (e.g. \"180s\", \"24h\") of an entry that is still considered valid. If an entry is older than the MaxAge, it will be ignored. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.cache = cache - self.key = key - self.max_age = max_age - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, cache, key, max_age, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Memoize - a model defined in OpenAPI - - Args: - cache (IoArgoprojWorkflowV1alpha1Cache): - key (str): Key is the key to use as the caching key - max_age (str): MaxAge is the maximum age (e.g. \"180s\", \"24h\") of an entry that is still considered valid. If an entry is older than the MaxAge, it will be ignored. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.cache = cache - self.key = key - self.max_age = max_age - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metadata.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metadata.py deleted file mode 100644 index 5927b779c6bb..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metadata.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Metadata(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'annotations': ({str: (str,)},), # noqa: E501 - 'labels': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'annotations': 'annotations', # noqa: E501 - 'labels': 'labels', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Metadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Metadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metric_label.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metric_label.py deleted file mode 100644 index a954f8d51f30..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metric_label.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1MetricLabel(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MetricLabel - a model defined in OpenAPI - - Args: - key (str): - value (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, value, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MetricLabel - a model defined in OpenAPI - - Args: - key (str): - value (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metrics.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metrics.py deleted file mode 100644 index b7a4022ebc6d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_metrics.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_prometheus import IoArgoprojWorkflowV1alpha1Prometheus - globals()['IoArgoprojWorkflowV1alpha1Prometheus'] = IoArgoprojWorkflowV1alpha1Prometheus - - -class IoArgoprojWorkflowV1alpha1Metrics(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'prometheus': ([IoArgoprojWorkflowV1alpha1Prometheus],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'prometheus': 'prometheus', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, prometheus, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Metrics - a model defined in OpenAPI - - Args: - prometheus ([IoArgoprojWorkflowV1alpha1Prometheus]): Prometheus is a list of prometheus metrics to be emitted - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.prometheus = prometheus - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, prometheus, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Metrics - a model defined in OpenAPI - - Args: - prometheus ([IoArgoprojWorkflowV1alpha1Prometheus]): Prometheus is a list of prometheus metrics to be emitted - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.prometheus = prometheus - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex.py deleted file mode 100644 index 0a15d05c6ba2..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Mutex(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Mutex - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): name of the mutex. [optional] # noqa: E501 - namespace (str): Namespace is the namespace of the mutex, default: [namespace of workflow]. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Mutex - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): name of the mutex. [optional] # noqa: E501 - namespace (str): Namespace is the namespace of the mutex, default: [namespace of workflow]. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_holding.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_holding.py deleted file mode 100644 index aaf3303520d1..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_holding.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1MutexHolding(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'holder': (str,), # noqa: E501 - 'mutex': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'holder': 'holder', # noqa: E501 - 'mutex': 'mutex', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MutexHolding - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holder (str): Holder is a reference to the object which holds the Mutex. Holding Scenario: 1. Current workflow's NodeID which is holding the lock. e.g: ${NodeID} Waiting Scenario: 1. Current workflow or other workflow NodeID which is holding the lock. e.g: ${WorkflowName}/${NodeID}. [optional] # noqa: E501 - mutex (str): Reference for the mutex e.g: ${namespace}/mutex/${mutexName}. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MutexHolding - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holder (str): Holder is a reference to the object which holds the Mutex. Holding Scenario: 1. Current workflow's NodeID which is holding the lock. e.g: ${NodeID} Waiting Scenario: 1. Current workflow or other workflow NodeID which is holding the lock. e.g: ${WorkflowName}/${NodeID}. [optional] # noqa: E501 - mutex (str): Reference for the mutex e.g: ${namespace}/mutex/${mutexName}. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_status.py deleted file mode 100644 index 09c5e9239a39..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_mutex_status.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_mutex_holding import IoArgoprojWorkflowV1alpha1MutexHolding - globals()['IoArgoprojWorkflowV1alpha1MutexHolding'] = IoArgoprojWorkflowV1alpha1MutexHolding - - -class IoArgoprojWorkflowV1alpha1MutexStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'holding': ([IoArgoprojWorkflowV1alpha1MutexHolding],), # noqa: E501 - 'waiting': ([IoArgoprojWorkflowV1alpha1MutexHolding],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'holding': 'holding', # noqa: E501 - 'waiting': 'waiting', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MutexStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holding ([IoArgoprojWorkflowV1alpha1MutexHolding]): Holding is a list of mutexes and their respective objects that are held by mutex lock for this io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - waiting ([IoArgoprojWorkflowV1alpha1MutexHolding]): Waiting is a list of mutexes and their respective objects this workflow is waiting for.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1MutexStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holding ([IoArgoprojWorkflowV1alpha1MutexHolding]): Holding is a list of mutexes and their respective objects that are held by mutex lock for this io.argoproj.workflow.v1alpha1.. 
[optional] # noqa: E501 - waiting ([IoArgoprojWorkflowV1alpha1MutexHolding]): Waiting is a list of mutexes and their respective objects this workflow is waiting for.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_flag.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_flag.py deleted file mode 100644 index a39d3bbbe135..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_flag.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1NodeFlag(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'hooked': (bool,), # noqa: E501 - 'retried': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'hooked': 'hooked', # noqa: E501 - 'retried': 'retried', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeFlag - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - hooked (bool): Hooked tracks whether or not this node was triggered by hook or onExit. [optional] # noqa: E501 - retried (bool): Retried tracks whether or not this node was retried by retryStrategy. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeFlag - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - hooked (bool): Hooked tracks whether or not this node was triggered by hook or onExit. 
[optional] # noqa: E501 - retried (bool): Retried tracks whether or not this node was retried by retryStrategy. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_result.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_result.py deleted file mode 100644 index cba548a9cc84..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_result.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs - globals()['IoArgoprojWorkflowV1alpha1Outputs'] = IoArgoprojWorkflowV1alpha1Outputs - - -class IoArgoprojWorkflowV1alpha1NodeResult(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'message': (str,), # noqa: E501 - 'outputs': (IoArgoprojWorkflowV1alpha1Outputs,), # noqa: E501 - 'phase': (str,), # noqa: E501 - 'progress': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'message': 'message', # noqa: E501 - 'outputs': 'outputs', # noqa: E501 - 'phase': 'phase', # noqa: E501 - 'progress': 'progress', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeResult - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - progress (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeResult - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - progress (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_status.py deleted file mode 100644 index 7b385d2b3ca8..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_status.py +++ /dev/null @@ -1,377 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_inputs import IoArgoprojWorkflowV1alpha1Inputs - from argo_workflows.model.io_argoproj_workflow_v1alpha1_memoization_status import IoArgoprojWorkflowV1alpha1MemoizationStatus - from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_flag import IoArgoprojWorkflowV1alpha1NodeFlag - from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_synchronization_status import IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus - from argo_workflows.model.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef - globals()['IoArgoprojWorkflowV1alpha1Inputs'] = 
IoArgoprojWorkflowV1alpha1Inputs - globals()['IoArgoprojWorkflowV1alpha1MemoizationStatus'] = IoArgoprojWorkflowV1alpha1MemoizationStatus - globals()['IoArgoprojWorkflowV1alpha1NodeFlag'] = IoArgoprojWorkflowV1alpha1NodeFlag - globals()['IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus'] = IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus - globals()['IoArgoprojWorkflowV1alpha1Outputs'] = IoArgoprojWorkflowV1alpha1Outputs - globals()['IoArgoprojWorkflowV1alpha1TemplateRef'] = IoArgoprojWorkflowV1alpha1TemplateRef - - -class IoArgoprojWorkflowV1alpha1NodeStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'id': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - 'boundary_id': (str,), # noqa: E501 - 'children': ([str],), # noqa: E501 - 'daemoned': (bool,), # noqa: E501 - 'display_name': (str,), # noqa: E501 - 'estimated_duration': (int,), # noqa: E501 - 'finished_at': (datetime,), # noqa: E501 - 'host_node_name': (str,), # noqa: E501 - 'inputs': (IoArgoprojWorkflowV1alpha1Inputs,), # noqa: E501 - 'memoization_status': (IoArgoprojWorkflowV1alpha1MemoizationStatus,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'node_flag': (IoArgoprojWorkflowV1alpha1NodeFlag,), # noqa: E501 - 'outbound_nodes': ([str],), # noqa: E501 - 'outputs': (IoArgoprojWorkflowV1alpha1Outputs,), # noqa: E501 - 'phase': (str,), # noqa: E501 - 'pod_ip': (str,), # noqa: E501 - 'progress': (str,), # noqa: E501 - 'resources_duration': ({str: (int,)},), # noqa: E501 - 'started_at': (datetime,), # noqa: E501 - 'synchronization_status': (IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus,), # noqa: E501 - 'template_name': (str,), # noqa: E501 - 'template_ref': (IoArgoprojWorkflowV1alpha1TemplateRef,), # noqa: E501 - 'template_scope': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'id': 'id', # noqa: E501 - 'name': 'name', # noqa: E501 - 
'type': 'type', # noqa: E501 - 'boundary_id': 'boundaryID', # noqa: E501 - 'children': 'children', # noqa: E501 - 'daemoned': 'daemoned', # noqa: E501 - 'display_name': 'displayName', # noqa: E501 - 'estimated_duration': 'estimatedDuration', # noqa: E501 - 'finished_at': 'finishedAt', # noqa: E501 - 'host_node_name': 'hostNodeName', # noqa: E501 - 'inputs': 'inputs', # noqa: E501 - 'memoization_status': 'memoizationStatus', # noqa: E501 - 'message': 'message', # noqa: E501 - 'node_flag': 'nodeFlag', # noqa: E501 - 'outbound_nodes': 'outboundNodes', # noqa: E501 - 'outputs': 'outputs', # noqa: E501 - 'phase': 'phase', # noqa: E501 - 'pod_ip': 'podIP', # noqa: E501 - 'progress': 'progress', # noqa: E501 - 'resources_duration': 'resourcesDuration', # noqa: E501 - 'started_at': 'startedAt', # noqa: E501 - 'synchronization_status': 'synchronizationStatus', # noqa: E501 - 'template_name': 'templateName', # noqa: E501 - 'template_ref': 'templateRef', # noqa: E501 - 'template_scope': 'templateScope', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, id, name, type, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeStatus - a model defined in OpenAPI - - Args: - id (str): ID is a unique identifier of a node within the worklow It is implemented as a hash of the node name, which makes the ID deterministic - name (str): Name is unique name in the node tree used to generate the node ID - type (str): Type indicates type of node - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - boundary_id (str): BoundaryID indicates the node ID of the associated template root node in which this node belongs to. [optional] # noqa: E501 - children ([str]): Children is a list of child node IDs. [optional] # noqa: E501 - daemoned (bool): Daemoned tracks whether or not this node was daemoned and need to be terminated. [optional] # noqa: E501 - display_name (str): DisplayName is a human readable representation of the node. Unique within a template boundary. [optional] # noqa: E501 - estimated_duration (int): EstimatedDuration in seconds.. [optional] # noqa: E501 - finished_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - host_node_name (str): HostNodeName name of the Kubernetes node on which the Pod is running, if applicable. [optional] # noqa: E501 - inputs (IoArgoprojWorkflowV1alpha1Inputs): [optional] # noqa: E501 - memoization_status (IoArgoprojWorkflowV1alpha1MemoizationStatus): [optional] # noqa: E501 - message (str): A human readable message indicating details about why the node is in this condition.. [optional] # noqa: E501 - node_flag (IoArgoprojWorkflowV1alpha1NodeFlag): [optional] # noqa: E501 - outbound_nodes ([str]): OutboundNodes tracks the node IDs which are considered \"outbound\" nodes to a template invocation. For every invocation of a template, there are nodes which we considered as \"outbound\". Essentially, these are last nodes in the execution sequence to run, before the template is considered completed. These nodes are then connected as parents to a following step. In the case of single pod steps (i.e. container, script, resource templates), this list will be nil since the pod itself is already considered the \"outbound\" node. In the case of DAGs, outbound nodes are the \"target\" tasks (tasks with no children). In the case of steps, outbound nodes are all the containers involved in the last step group. NOTE: since templates are composable, the list of outbound nodes are carried upwards when a DAG/steps template invokes another DAG/steps template. In other words, the outbound nodes of a template, will be a superset of the outbound nodes of its last children.. [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - phase (str): Phase a simple, high-level summary of where the node is in its lifecycle. Can be used as a state machine. Will be one of these values \"Pending\", \"Running\" before the node is completed, or \"Succeeded\", \"Skipped\", \"Failed\", \"Error\", or \"Omitted\" as a final state.. 
[optional] # noqa: E501 - pod_ip (str): PodIP captures the IP of the pod for daemoned steps. [optional] # noqa: E501 - progress (str): Progress to completion. [optional] # noqa: E501 - resources_duration ({str: (int,)}): ResourcesDuration is indicative, but not accurate, resource duration. This is populated when the nodes completes.. [optional] # noqa: E501 - started_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - synchronization_status (IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus): [optional] # noqa: E501 - template_name (str): TemplateName is the template name which this node corresponds to. Not applicable to virtual nodes (e.g. Retry, StepGroup). [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - template_scope (str): TemplateScope is the template scope in which the template of this node was retrieved.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - self.name = name - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, id, name, type, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeStatus - a model defined in OpenAPI - - Args: - id (str): ID is a unique identifier of a node within the worklow It is implemented as a hash of the node name, which makes the ID deterministic - name (str): Name is unique name in the node tree used to generate the node ID - type (str): Type indicates type of node - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - boundary_id (str): BoundaryID indicates the node ID of the associated template root node in which this node belongs to. [optional] # noqa: E501 - children ([str]): Children is a list of child node IDs. [optional] # noqa: E501 - daemoned (bool): Daemoned tracks whether or not this node was daemoned and need to be terminated. [optional] # noqa: E501 - display_name (str): DisplayName is a human readable representation of the node. Unique within a template boundary. [optional] # noqa: E501 - estimated_duration (int): EstimatedDuration in seconds.. [optional] # noqa: E501 - finished_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - host_node_name (str): HostNodeName name of the Kubernetes node on which the Pod is running, if applicable. 
[optional] # noqa: E501 - inputs (IoArgoprojWorkflowV1alpha1Inputs): [optional] # noqa: E501 - memoization_status (IoArgoprojWorkflowV1alpha1MemoizationStatus): [optional] # noqa: E501 - message (str): A human readable message indicating details about why the node is in this condition.. [optional] # noqa: E501 - node_flag (IoArgoprojWorkflowV1alpha1NodeFlag): [optional] # noqa: E501 - outbound_nodes ([str]): OutboundNodes tracks the node IDs which are considered \"outbound\" nodes to a template invocation. For every invocation of a template, there are nodes which we considered as \"outbound\". Essentially, these are last nodes in the execution sequence to run, before the template is considered completed. These nodes are then connected as parents to a following step. In the case of single pod steps (i.e. container, script, resource templates), this list will be nil since the pod itself is already considered the \"outbound\" node. In the case of DAGs, outbound nodes are the \"target\" tasks (tasks with no children). In the case of steps, outbound nodes are all the containers involved in the last step group. NOTE: since templates are composable, the list of outbound nodes are carried upwards when a DAG/steps template invokes another DAG/steps template. In other words, the outbound nodes of a template, will be a superset of the outbound nodes of its last children.. [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - phase (str): Phase a simple, high-level summary of where the node is in its lifecycle. Can be used as a state machine. Will be one of these values \"Pending\", \"Running\" before the node is completed, or \"Succeeded\", \"Skipped\", \"Failed\", \"Error\", or \"Omitted\" as a final state.. [optional] # noqa: E501 - pod_ip (str): PodIP captures the IP of the pod for daemoned steps. [optional] # noqa: E501 - progress (str): Progress to completion. 
[optional] # noqa: E501 - resources_duration ({str: (int,)}): ResourcesDuration is indicative, but not accurate, resource duration. This is populated when the nodes completes.. [optional] # noqa: E501 - started_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - synchronization_status (IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus): [optional] # noqa: E501 - template_name (str): TemplateName is the template name which this node corresponds to. Not applicable to virtual nodes (e.g. Retry, StepGroup). [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - template_scope (str): TemplateScope is the template scope in which the template of this node was retrieved.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - self.name = name - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_synchronization_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_synchronization_status.py deleted file mode 100644 index 8747678d56a0..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_node_synchronization_status.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'waiting': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'waiting': 'waiting', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - waiting (str): Waiting is the name of the lock that this node is waiting for. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - waiting (str): Waiting is the name of the lock that this node is waiting for. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_auth.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_auth.py deleted file mode 100644 index 03bf77905ef0..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_auth.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojWorkflowV1alpha1OAuth2EndpointParam'] = IoArgoprojWorkflowV1alpha1OAuth2EndpointParam - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1OAuth2Auth(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'client_id_secret': (SecretKeySelector,), # noqa: E501 - 'client_secret_secret': (SecretKeySelector,), # noqa: E501 - 'endpoint_params': ([IoArgoprojWorkflowV1alpha1OAuth2EndpointParam],), # noqa: E501 - 'scopes': ([str],), # noqa: E501 - 'token_url_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'client_id_secret': 'clientIDSecret', # noqa: E501 - 'client_secret_secret': 'clientSecretSecret', # noqa: E501 - 'endpoint_params': 'endpointParams', # noqa: E501 - 'scopes': 'scopes', # noqa: E501 - 'token_url_secret': 'tokenURLSecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OAuth2Auth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - client_id_secret (SecretKeySelector): [optional] # noqa: E501 - client_secret_secret (SecretKeySelector): [optional] # noqa: E501 - endpoint_params ([IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]): [optional] # noqa: E501 - scopes ([str]): [optional] # noqa: E501 - token_url_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OAuth2Auth - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - client_id_secret (SecretKeySelector): [optional] # noqa: E501 - client_secret_secret (SecretKeySelector): [optional] # noqa: E501 - endpoint_params ([IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]): [optional] # noqa: E501 - scopes ([str]): [optional] # noqa: E501 - token_url_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py deleted file mode 100644 index 1c6e6c5287a8..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1OAuth2EndpointParam(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OAuth2EndpointParam - a model defined in OpenAPI - - Args: - key (str): Name is the header name - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): Value is the literal value to use for the header. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OAuth2EndpointParam - a model defined in OpenAPI - - Args: - key (str): Name is the header name - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - value (str): Value is the literal value to use for the header. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact.py deleted file mode 100644 index ba528607caa2..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact.py +++ /dev/null @@ -1,301 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojWorkflowV1alpha1OSSLifecycleRule'] = IoArgoprojWorkflowV1alpha1OSSLifecycleRule - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1OSSArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'key': (str,), # noqa: E501 - 'access_key_secret': (SecretKeySelector,), # noqa: E501 - 'bucket': (str,), # noqa: E501 - 'create_bucket_if_not_present': (bool,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'lifecycle_rule': (IoArgoprojWorkflowV1alpha1OSSLifecycleRule,), # noqa: E501 - 'secret_key_secret': (SecretKeySelector,), # noqa: E501 - 'security_token': (str,), # noqa: E501 - 'use_sdk_creds': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'access_key_secret': 'accessKeySecret', # noqa: E501 - 'bucket': 'bucket', # noqa: E501 - 'create_bucket_if_not_present': 'createBucketIfNotPresent', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'lifecycle_rule': 'lifecycleRule', # noqa: E501 - 'secret_key_secret': 'secretKeySecret', # noqa: E501 - 'security_token': 'securityToken', # noqa: E501 - 'use_sdk_creds': 'useSDKCreds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OSSArtifact - a model defined in OpenAPI - - Args: - key (str): Key is the path in the bucket where the artifact resides - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - create_bucket_if_not_present (bool): CreateBucketIfNotPresent tells the driver to attempt to create the OSS bucket for output artifacts, if it doesn't exist. [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - lifecycle_rule (IoArgoprojWorkflowV1alpha1OSSLifecycleRule): [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - security_token (str): SecurityToken is the user's temporary security token. For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm. [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OSSArtifact - a model defined in OpenAPI - - Args: - key (str): Key is the path in the bucket where the artifact resides - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - create_bucket_if_not_present (bool): CreateBucketIfNotPresent tells the driver to attempt to create the OSS bucket for output artifacts, if it doesn't exist. [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - lifecycle_rule (IoArgoprojWorkflowV1alpha1OSSLifecycleRule): [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - security_token (str): SecurityToken is the user's temporary security token. For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm. 
[optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py deleted file mode 100644 index 0467bbd3e863..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojWorkflowV1alpha1OSSLifecycleRule'] = IoArgoprojWorkflowV1alpha1OSSLifecycleRule - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1OSSArtifactRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_key_secret': (SecretKeySelector,), # noqa: E501 - 'bucket': (str,), # noqa: E501 - 'create_bucket_if_not_present': (bool,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'key_format': (str,), # noqa: E501 - 'lifecycle_rule': (IoArgoprojWorkflowV1alpha1OSSLifecycleRule,), # noqa: E501 - 'secret_key_secret': (SecretKeySelector,), # noqa: E501 - 'security_token': (str,), # noqa: E501 - 'use_sdk_creds': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_key_secret': 'accessKeySecret', # noqa: E501 - 'bucket': 'bucket', # noqa: E501 - 'create_bucket_if_not_present': 'createBucketIfNotPresent', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'key_format': 'keyFormat', # noqa: E501 - 'lifecycle_rule': 'lifecycleRule', # noqa: E501 - 'secret_key_secret': 'secretKeySecret', # noqa: E501 - 'security_token': 'securityToken', # noqa: E501 - 'use_sdk_creds': 'useSDKCreds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OSSArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - create_bucket_if_not_present (bool): CreateBucketIfNotPresent tells the driver to attempt to create the OSS bucket for output artifacts, if it doesn't exist. [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. [optional] # noqa: E501 - lifecycle_rule (IoArgoprojWorkflowV1alpha1OSSLifecycleRule): [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - security_token (str): SecurityToken is the user's temporary security token. For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm. [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OSSArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - create_bucket_if_not_present (bool): CreateBucketIfNotPresent tells the driver to attempt to create the OSS bucket for output artifacts, if it doesn't exist. [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. 
[optional] # noqa: E501 - lifecycle_rule (IoArgoprojWorkflowV1alpha1OSSLifecycleRule): [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - security_token (str): SecurityToken is the user's temporary security token. For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm. [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py deleted file mode 100644 index 096192ea6e17..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1OSSLifecycleRule(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'mark_deletion_after_days': (int,), # noqa: E501 - 'mark_infrequent_access_after_days': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'mark_deletion_after_days': 'markDeletionAfterDays', # noqa: E501 - 'mark_infrequent_access_after_days': 'markInfrequentAccessAfterDays', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OSSLifecycleRule - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mark_deletion_after_days (int): MarkDeletionAfterDays is the number of days before we delete objects in the bucket. [optional] # noqa: E501 - mark_infrequent_access_after_days (int): MarkInfrequentAccessAfterDays is the number of days before we convert the objects in the bucket to Infrequent Access (IA) storage type. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1OSSLifecycleRule - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mark_deletion_after_days (int): MarkDeletionAfterDays is the number of days before we delete objects in the bucket. [optional] # noqa: E501 - mark_infrequent_access_after_days (int): MarkInfrequentAccessAfterDays is the number of days before we convert the objects in the bucket to Infrequent Access (IA) storage type. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_outputs.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_outputs.py deleted file mode 100644 index 725e6faa8a8f..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_outputs.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact - from argo_workflows.model.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter - globals()['IoArgoprojWorkflowV1alpha1Artifact'] = IoArgoprojWorkflowV1alpha1Artifact - globals()['IoArgoprojWorkflowV1alpha1Parameter'] = IoArgoprojWorkflowV1alpha1Parameter - - -class IoArgoprojWorkflowV1alpha1Outputs(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'artifacts': ([IoArgoprojWorkflowV1alpha1Artifact],), # noqa: E501 - 'exit_code': (str,), # noqa: E501 - 'parameters': ([IoArgoprojWorkflowV1alpha1Parameter],), # noqa: E501 - 'result': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifacts': 'artifacts', # noqa: E501 - 'exit_code': 'exitCode', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Outputs - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts ([IoArgoprojWorkflowV1alpha1Artifact]): Artifacts holds the list of output artifacts produced by a step. [optional] # noqa: E501 - exit_code (str): ExitCode holds the exit code of a script template. [optional] # noqa: E501 - parameters ([IoArgoprojWorkflowV1alpha1Parameter]): Parameters holds the list of output parameters produced by a step. [optional] # noqa: E501 - result (str): Result holds the result (stdout) of a script template. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Outputs - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifacts ([IoArgoprojWorkflowV1alpha1Artifact]): Artifacts holds the list of output artifacts produced by a step. [optional] # noqa: E501 - exit_code (str): ExitCode holds the exit code of a script template. [optional] # noqa: E501 - parameters ([IoArgoprojWorkflowV1alpha1Parameter]): Parameters holds the list of output parameters produced by a step. [optional] # noqa: E501 - result (str): Result holds the result (stdout) of a script template. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parallel_steps.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parallel_steps.py deleted file mode 100644 index 6a79eeb037a8..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parallel_steps.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_step import IoArgoprojWorkflowV1alpha1WorkflowStep - globals()['IoArgoprojWorkflowV1alpha1WorkflowStep'] = IoArgoprojWorkflowV1alpha1WorkflowStep - - -class IoArgoprojWorkflowV1alpha1ParallelSteps(ModelSimple): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - additional_properties_type = None - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'value': ([IoArgoprojWorkflowV1alpha1WorkflowStep],), - } - - @cached_property - def discriminator(): - return None - - - attribute_map = {} - - read_only_vars = set() - - _composed_schemas = None - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): - """IoArgoprojWorkflowV1alpha1ParallelSteps - a model defined in OpenAPI - - Note that value can be passed either in args or in kwargs, but not in both. - - Args: - args[0] ([IoArgoprojWorkflowV1alpha1WorkflowStep]): # noqa: E501 - - Keyword Args: - value ([IoArgoprojWorkflowV1alpha1WorkflowStep]): # noqa: E501 - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - # required up here when default value is not given - _path_to_item = kwargs.pop('_path_to_item', ()) - - if 'value' in kwargs: - value = kwargs.pop('value') - elif args: - args = list(args) - value = args.pop(0) - else: - raise ApiTypeError( - "value is required, but not passed in args or kwargs and doesn't have default", - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.value = value - if kwargs: - raise ApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." 
% ( - kwargs, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): - """IoArgoprojWorkflowV1alpha1ParallelSteps - a model defined in OpenAPI - - Note that value can be passed either in args or in kwargs, but not in both. - - Args: - args[0] ([IoArgoprojWorkflowV1alpha1WorkflowStep]): # noqa: E501 - - Keyword Args: - value ([IoArgoprojWorkflowV1alpha1WorkflowStep]): # noqa: E501 - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - # required up here when default value is not given - _path_to_item = kwargs.pop('_path_to_item', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if 'value' in kwargs: - value = kwargs.pop('value') - elif args: - args = list(args) - value = args.pop(0) - else: - raise ApiTypeError( - "value is required, but not passed in args or kwargs and doesn't have default", - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.value = value - if kwargs: - raise ApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." 
% ( - kwargs, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - return self diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parameter.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parameter.py deleted file mode 100644 index 8415cb8ab6db..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_parameter.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_value_from import IoArgoprojWorkflowV1alpha1ValueFrom - globals()['IoArgoprojWorkflowV1alpha1ValueFrom'] = IoArgoprojWorkflowV1alpha1ValueFrom - - -class IoArgoprojWorkflowV1alpha1Parameter(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'default': (str,), # noqa: E501 - 'description': (str,), # noqa: E501 - 'enum': ([str],), # noqa: E501 - 'global_name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - 'value_from': (IoArgoprojWorkflowV1alpha1ValueFrom,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'default': 'default', # noqa: E501 - 'description': 'description', # noqa: E501 - 'enum': 'enum', # noqa: E501 - 'global_name': 'globalName', # noqa: E501 - 'value': 'value', # noqa: E501 - 'value_from': 'valueFrom', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Parameter - a model defined in OpenAPI - - Args: - name (str): Name is the parameter name - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default (str): Default is the default value to use for an input parameter if a value was not supplied. [optional] # noqa: E501 - description (str): Description is the parameter description. [optional] # noqa: E501 - enum ([str]): Enum holds a list of string values to choose from, for the actual value of the parameter. [optional] # noqa: E501 - global_name (str): GlobalName exports an output parameter to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.parameters.XXXX}} and in workflow.status.outputs.parameters. [optional] # noqa: E501 - value (str): Value is the literal value to use for the parameter. If specified in the context of an input parameter, the value takes precedence over any passed values. [optional] # noqa: E501 - value_from (IoArgoprojWorkflowV1alpha1ValueFrom): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Parameter - a model defined in OpenAPI - - Args: - name (str): Name is the parameter name - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default (str): Default is the default value to use for an input parameter if a value was not supplied. [optional] # noqa: E501 - description (str): Description is the parameter description. [optional] # noqa: E501 - enum ([str]): Enum holds a list of string values to choose from, for the actual value of the parameter. [optional] # noqa: E501 - global_name (str): GlobalName exports an output parameter to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.parameters.XXXX}} and in workflow.status.outputs.parameters. [optional] # noqa: E501 - value (str): Value is the literal value to use for the parameter. If specified in the context of an input parameter, the value takes precedence over any passed values. [optional] # noqa: E501 - value_from (IoArgoprojWorkflowV1alpha1ValueFrom): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_pod_gc.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_pod_gc.py deleted file mode 100644 index 06ccce15c88d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_pod_gc.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.duration import Duration - from argo_workflows.model.label_selector import LabelSelector - globals()['Duration'] = Duration - globals()['LabelSelector'] = LabelSelector - - -class IoArgoprojWorkflowV1alpha1PodGC(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'delete_delay_duration': (Duration,), # noqa: E501 - 'label_selector': (LabelSelector,), # noqa: E501 - 'strategy': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'delete_delay_duration': 'deleteDelayDuration', # noqa: E501 - 'label_selector': 'labelSelector', # noqa: E501 - 'strategy': 'strategy', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1PodGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - delete_delay_duration (Duration): [optional] # noqa: E501 - label_selector (LabelSelector): [optional] # noqa: E501 - strategy (str): Strategy is the strategy to use. One of \"OnPodCompletion\", \"OnPodSuccess\", \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". If unset, does not delete Pods. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1PodGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - delete_delay_duration (Duration): [optional] # noqa: E501 - label_selector (LabelSelector): [optional] # noqa: E501 - strategy (str): Strategy is the strategy to use. One of \"OnPodCompletion\", \"OnPodSuccess\", \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". If unset, does not delete Pods. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_prometheus.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_prometheus.py deleted file mode 100644 index 82d07a01baf9..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_prometheus.py +++ /dev/null @@ -1,299 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_counter import IoArgoprojWorkflowV1alpha1Counter - from argo_workflows.model.io_argoproj_workflow_v1alpha1_gauge import IoArgoprojWorkflowV1alpha1Gauge - from argo_workflows.model.io_argoproj_workflow_v1alpha1_histogram import IoArgoprojWorkflowV1alpha1Histogram - from argo_workflows.model.io_argoproj_workflow_v1alpha1_metric_label import IoArgoprojWorkflowV1alpha1MetricLabel - globals()['IoArgoprojWorkflowV1alpha1Counter'] = IoArgoprojWorkflowV1alpha1Counter - globals()['IoArgoprojWorkflowV1alpha1Gauge'] = IoArgoprojWorkflowV1alpha1Gauge - 
globals()['IoArgoprojWorkflowV1alpha1Histogram'] = IoArgoprojWorkflowV1alpha1Histogram - globals()['IoArgoprojWorkflowV1alpha1MetricLabel'] = IoArgoprojWorkflowV1alpha1MetricLabel - - -class IoArgoprojWorkflowV1alpha1Prometheus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'help': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'counter': (IoArgoprojWorkflowV1alpha1Counter,), # noqa: E501 - 'gauge': (IoArgoprojWorkflowV1alpha1Gauge,), # noqa: E501 - 'histogram': (IoArgoprojWorkflowV1alpha1Histogram,), # noqa: E501 - 'labels': ([IoArgoprojWorkflowV1alpha1MetricLabel],), # noqa: E501 - 'when': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'help': 'help', # noqa: E501 - 'name': 'name', # noqa: E501 - 'counter': 'counter', # noqa: E501 - 'gauge': 'gauge', # noqa: E501 - 'histogram': 'histogram', # noqa: E501 - 'labels': 'labels', # noqa: E501 - 'when': 'when', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, help, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Prometheus - a model defined in OpenAPI - - Args: - help (str): Help is a string that describes the metric - name (str): Name is the name of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - counter (IoArgoprojWorkflowV1alpha1Counter): [optional] # noqa: E501 - gauge (IoArgoprojWorkflowV1alpha1Gauge): [optional] # noqa: E501 - histogram (IoArgoprojWorkflowV1alpha1Histogram): [optional] # noqa: E501 - labels ([IoArgoprojWorkflowV1alpha1MetricLabel]): Labels is a list of metric labels. [optional] # noqa: E501 - when (str): When is a conditional statement that decides when to emit the metric. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.help = help - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, help, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Prometheus - a model defined in OpenAPI - - Args: - help (str): Help is a string that describes the metric - name (str): Name is the name of the metric - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - counter (IoArgoprojWorkflowV1alpha1Counter): [optional] # noqa: E501 - gauge (IoArgoprojWorkflowV1alpha1Gauge): [optional] # noqa: E501 - histogram (IoArgoprojWorkflowV1alpha1Histogram): [optional] # noqa: E501 - labels ([IoArgoprojWorkflowV1alpha1MetricLabel]): Labels is a list of metric labels. [optional] # noqa: E501 - when (str): When is a conditional statement that decides when to emit the metric. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.help = help - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_raw_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_raw_artifact.py deleted file mode 100644 index 90bff2a0c528..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_raw_artifact.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1RawArtifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'data': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'data': 'data', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, data, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RawArtifact - a model defined in OpenAPI - - Args: - data (str): Data is the string contents of the artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.data = data - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, data, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RawArtifact - a model defined in OpenAPI - - Args: - data (str): Data is the string contents of the artifact - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.data = data - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py deleted file mode 100644 index a228a09e7614..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resource_template.py +++ /dev/null @@ -1,295 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom - globals()['IoArgoprojWorkflowV1alpha1ManifestFrom'] = IoArgoprojWorkflowV1alpha1ManifestFrom - - -class IoArgoprojWorkflowV1alpha1ResourceTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'action': (str,), # noqa: E501 - 'failure_condition': (str,), # noqa: E501 - 'flags': ([str],), # noqa: E501 - 'manifest': (str,), # noqa: E501 - 'manifest_from': (IoArgoprojWorkflowV1alpha1ManifestFrom,), # noqa: E501 - 'merge_strategy': (str,), # noqa: E501 - 'set_owner_reference': (bool,), # noqa: E501 - 'success_condition': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'action': 'action', # noqa: E501 - 'failure_condition': 'failureCondition', # noqa: E501 - 'flags': 'flags', # noqa: E501 - 'manifest': 'manifest', # noqa: E501 - 'manifest_from': 'manifestFrom', # noqa: E501 - 'merge_strategy': 'mergeStrategy', # noqa: E501 - 'set_owner_reference': 'setOwnerReference', # noqa: E501 - 'success_condition': 'successCondition', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, action, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ResourceTemplate - a model defined in OpenAPI - - Args: - action (str): Action is the action to perform to the resource. Must be one of: get, create, apply, delete, replace, patch - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - failure_condition (str): FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed. [optional] # noqa: E501 - flags ([str]): Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ]. [optional] # noqa: E501 - manifest (str): Manifest contains the kubernetes manifest. [optional] # noqa: E501 - manifest_from (IoArgoprojWorkflowV1alpha1ManifestFrom): [optional] # noqa: E501 - merge_strategy (str): MergeStrategy is the strategy used to merge a patch. It defaults to \"strategic\" Must be one of: strategic, merge, json. [optional] # noqa: E501 - set_owner_reference (bool): SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource.. [optional] # noqa: E501 - success_condition (str): SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.action = action - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, action, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ResourceTemplate - a model defined in OpenAPI - - Args: - action (str): Action is the action to perform to the resource. Must be one of: get, create, apply, delete, replace, patch - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - failure_condition (str): FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed. [optional] # noqa: E501 - flags ([str]): Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ]. [optional] # noqa: E501 - manifest (str): Manifest contains the kubernetes manifest. [optional] # noqa: E501 - manifest_from (IoArgoprojWorkflowV1alpha1ManifestFrom): [optional] # noqa: E501 - merge_strategy (str): MergeStrategy is the strategy used to merge a patch. It defaults to \"strategic\" Must be one of: strategic, merge, json. 
[optional] # noqa: E501 - set_owner_reference (bool): SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource.. [optional] # noqa: E501 - success_condition (str): SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.action = action - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py deleted file mode 100644 index 076a15fd3e98..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'memoized': (bool,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'parameters': ([str],), # noqa: E501 - 'uid': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'memoized': 'memoized', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'uid': 'uid', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - memoized (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - uid (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - memoized (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - uid (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_affinity.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_affinity.py deleted file mode 100644 index 3bc39ed6d26e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_affinity.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1RetryAffinity(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'node_anti_affinity': (bool, date, datetime, dict, float, int, list, str, none_type,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'node_anti_affinity': 'nodeAntiAffinity', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RetryAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - node_anti_affinity (bool, date, datetime, dict, float, int, list, str, none_type): RetryNodeAntiAffinity is a placeholder for future expansion, only empty nodeAntiAffinity is allowed. In order to prevent running steps on the same host, it uses \"kubernetes.io/hostname\".. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RetryAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - node_anti_affinity (bool, date, datetime, dict, float, int, list, str, none_type): RetryNodeAntiAffinity is a placeholder for future expansion, only empty nodeAntiAffinity is allowed. In order to prevent running steps on the same host, it uses \"kubernetes.io/hostname\".. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py deleted file mode 100644 index fcd68a5cb29e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'node_field_selector': (str,), # noqa: E501 - 'parameters': ([str],), # noqa: E501 - 'restart_successful': (bool,), # noqa: E501 - 'uid': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'node_field_selector': 'nodeFieldSelector', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'restart_successful': 'restartSuccessful', # noqa: E501 - 'uid': 'uid', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - restart_successful (bool): [optional] # noqa: E501 - uid (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - restart_successful (bool): [optional] # noqa: E501 - uid (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_strategy.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_strategy.py deleted file mode 100644 index a665c3fbf216..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_retry_strategy.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_backoff import IoArgoprojWorkflowV1alpha1Backoff - from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_affinity import IoArgoprojWorkflowV1alpha1RetryAffinity - globals()['IoArgoprojWorkflowV1alpha1Backoff'] = IoArgoprojWorkflowV1alpha1Backoff - globals()['IoArgoprojWorkflowV1alpha1RetryAffinity'] = IoArgoprojWorkflowV1alpha1RetryAffinity - - -class IoArgoprojWorkflowV1alpha1RetryStrategy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'affinity': (IoArgoprojWorkflowV1alpha1RetryAffinity,), # noqa: E501 - 'backoff': (IoArgoprojWorkflowV1alpha1Backoff,), # noqa: E501 - 'expression': (str,), # noqa: E501 - 'limit': (str,), # noqa: E501 - 'retry_policy': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'affinity': 'affinity', # noqa: E501 - 'backoff': 'backoff', # noqa: E501 - 'expression': 'expression', # noqa: E501 - 'limit': 'limit', # noqa: E501 - 'retry_policy': 'retryPolicy', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RetryStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - affinity (IoArgoprojWorkflowV1alpha1RetryAffinity): [optional] # noqa: E501 - backoff (IoArgoprojWorkflowV1alpha1Backoff): [optional] # noqa: E501 - expression (str): Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored. [optional] # noqa: E501 - limit (str): [optional] # noqa: E501 - retry_policy (str): RetryPolicy is a policy of NodePhase statuses that will be retried. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1RetryStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - affinity (IoArgoprojWorkflowV1alpha1RetryAffinity): [optional] # noqa: E501 - backoff (IoArgoprojWorkflowV1alpha1Backoff): [optional] # noqa: E501 - expression (str): Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored. [optional] # noqa: E501 - limit (str): [optional] # noqa: E501 - retry_policy (str): RetryPolicy is a policy of NodePhase statuses that will be retried. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact.py deleted file mode 100644 index fd019b40a4b6..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact.py +++ /dev/null @@ -1,309 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojWorkflowV1alpha1CreateS3BucketOptions'] = IoArgoprojWorkflowV1alpha1CreateS3BucketOptions - globals()['IoArgoprojWorkflowV1alpha1S3EncryptionOptions'] = IoArgoprojWorkflowV1alpha1S3EncryptionOptions - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1S3Artifact(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_key_secret': (SecretKeySelector,), # noqa: E501 - 'bucket': (str,), # noqa: E501 - 'ca_secret': (SecretKeySelector,), # noqa: E501 - 'create_bucket_if_not_present': (IoArgoprojWorkflowV1alpha1CreateS3BucketOptions,), # noqa: E501 - 'encryption_options': (IoArgoprojWorkflowV1alpha1S3EncryptionOptions,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'insecure': (bool,), # noqa: E501 - 'key': (str,), # noqa: E501 - 'region': (str,), # noqa: E501 - 'role_arn': (str,), # noqa: E501 - 'secret_key_secret': (SecretKeySelector,), # noqa: E501 - 'use_sdk_creds': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_key_secret': 'accessKeySecret', # noqa: E501 - 'bucket': 'bucket', # noqa: E501 - 'ca_secret': 'caSecret', # noqa: E501 - 'create_bucket_if_not_present': 'createBucketIfNotPresent', # noqa: E501 - 'encryption_options': 'encryptionOptions', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'insecure': 'insecure', # noqa: E501 - 'key': 'key', # noqa: E501 - 'region': 'region', # noqa: E501 - 'role_arn': 'roleARN', # noqa: E501 - 'secret_key_secret': 'secretKeySecret', # noqa: E501 - 'use_sdk_creds': 'useSDKCreds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1S3Artifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - ca_secret (SecretKeySelector): [optional] # noqa: E501 - create_bucket_if_not_present (IoArgoprojWorkflowV1alpha1CreateS3BucketOptions): [optional] # noqa: E501 - encryption_options (IoArgoprojWorkflowV1alpha1S3EncryptionOptions): [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - insecure (bool): Insecure will connect to the service with TLS. [optional] # noqa: E501 - key (str): Key is the key in the bucket where the artifact resides. [optional] # noqa: E501 - region (str): Region contains the optional bucket region. [optional] # noqa: E501 - role_arn (str): RoleARN is the Amazon Resource Name (ARN) of the role to assume.. [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1S3Artifact - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - ca_secret (SecretKeySelector): [optional] # noqa: E501 - create_bucket_if_not_present (IoArgoprojWorkflowV1alpha1CreateS3BucketOptions): [optional] # noqa: E501 - encryption_options (IoArgoprojWorkflowV1alpha1S3EncryptionOptions): [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - insecure (bool): Insecure will connect to the service with TLS. [optional] # noqa: E501 - key (str): Key is the key in the bucket where the artifact resides. [optional] # noqa: E501 - region (str): Region contains the optional bucket region. [optional] # noqa: E501 - role_arn (str): RoleARN is the Amazon Resource Name (ARN) of the role to assume.. [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact_repository.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact_repository.py deleted file mode 100644 index c61fa854ac92..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_artifact_repository.py +++ /dev/null @@ -1,313 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojWorkflowV1alpha1CreateS3BucketOptions'] = IoArgoprojWorkflowV1alpha1CreateS3BucketOptions - globals()['IoArgoprojWorkflowV1alpha1S3EncryptionOptions'] = IoArgoprojWorkflowV1alpha1S3EncryptionOptions - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1S3ArtifactRepository(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_key_secret': (SecretKeySelector,), # noqa: E501 - 'bucket': (str,), # noqa: E501 - 'ca_secret': (SecretKeySelector,), # noqa: E501 - 'create_bucket_if_not_present': (IoArgoprojWorkflowV1alpha1CreateS3BucketOptions,), # noqa: E501 - 'encryption_options': (IoArgoprojWorkflowV1alpha1S3EncryptionOptions,), # noqa: E501 - 'endpoint': (str,), # noqa: E501 - 'insecure': (bool,), # noqa: E501 - 'key_format': (str,), # noqa: E501 - 'key_prefix': (str,), # noqa: E501 - 'region': (str,), # noqa: E501 - 'role_arn': (str,), # noqa: E501 - 'secret_key_secret': (SecretKeySelector,), # noqa: E501 - 'use_sdk_creds': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_key_secret': 'accessKeySecret', # noqa: E501 - 'bucket': 'bucket', # noqa: E501 - 'ca_secret': 'caSecret', # noqa: E501 - 'create_bucket_if_not_present': 'createBucketIfNotPresent', # noqa: E501 - 'encryption_options': 'encryptionOptions', # noqa: E501 - 'endpoint': 'endpoint', # noqa: E501 - 'insecure': 'insecure', # noqa: E501 - 'key_format': 'keyFormat', # noqa: E501 - 'key_prefix': 'keyPrefix', # noqa: E501 - 'region': 'region', # noqa: E501 - 'role_arn': 'roleARN', # noqa: E501 - 'secret_key_secret': 'secretKeySecret', # noqa: E501 - 'use_sdk_creds': 'useSDKCreds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1S3ArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. [optional] # noqa: E501 - ca_secret (SecretKeySelector): [optional] # noqa: E501 - create_bucket_if_not_present (IoArgoprojWorkflowV1alpha1CreateS3BucketOptions): [optional] # noqa: E501 - encryption_options (IoArgoprojWorkflowV1alpha1S3EncryptionOptions): [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - insecure (bool): Insecure will connect to the service with TLS. [optional] # noqa: E501 - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. 
[optional] # noqa: E501 - key_prefix (str): KeyPrefix is prefix used as part of the bucket key in which the controller will store artifacts. DEPRECATED. Use KeyFormat instead. [optional] # noqa: E501 - region (str): Region contains the optional bucket region. [optional] # noqa: E501 - role_arn (str): RoleARN is the Amazon Resource Name (ARN) of the role to assume.. [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1S3ArtifactRepository - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_key_secret (SecretKeySelector): [optional] # noqa: E501 - bucket (str): Bucket is the name of the bucket. 
[optional] # noqa: E501 - ca_secret (SecretKeySelector): [optional] # noqa: E501 - create_bucket_if_not_present (IoArgoprojWorkflowV1alpha1CreateS3BucketOptions): [optional] # noqa: E501 - encryption_options (IoArgoprojWorkflowV1alpha1S3EncryptionOptions): [optional] # noqa: E501 - endpoint (str): Endpoint is the hostname of the bucket endpoint. [optional] # noqa: E501 - insecure (bool): Insecure will connect to the service with TLS. [optional] # noqa: E501 - key_format (str): KeyFormat defines the format of how to store keys and can reference workflow variables.. [optional] # noqa: E501 - key_prefix (str): KeyPrefix is prefix used as part of the bucket key in which the controller will store artifacts. DEPRECATED. Use KeyFormat instead. [optional] # noqa: E501 - region (str): Region contains the optional bucket region. [optional] # noqa: E501 - role_arn (str): RoleARN is the Amazon Resource Name (ARN) of the role to assume.. [optional] # noqa: E501 - secret_key_secret (SecretKeySelector): [optional] # noqa: E501 - use_sdk_creds (bool): UseSDKCreds tells the driver to figure out credentials based on sdk defaults.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_encryption_options.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_encryption_options.py deleted file mode 100644 index 1e1241e9205e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_s3_encryption_options.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['SecretKeySelector'] = SecretKeySelector - - -class IoArgoprojWorkflowV1alpha1S3EncryptionOptions(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'enable_encryption': (bool,), # noqa: E501 - 'kms_encryption_context': (str,), # noqa: E501 - 'kms_key_id': (str,), # noqa: E501 - 'server_side_customer_key_secret': (SecretKeySelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'enable_encryption': 'enableEncryption', # noqa: E501 - 'kms_encryption_context': 'kmsEncryptionContext', # noqa: E501 - 'kms_key_id': 'kmsKeyId', # noqa: E501 - 'server_side_customer_key_secret': 'serverSideCustomerKeySecret', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1S3EncryptionOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - enable_encryption (bool): EnableEncryption tells the driver to encrypt objects if set to true. If kmsKeyId and serverSideCustomerKeySecret are not set, SSE-S3 will be used. [optional] # noqa: E501 - kms_encryption_context (str): KmsEncryptionContext is a json blob that contains an encryption context. See https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context for more information. [optional] # noqa: E501 - kms_key_id (str): KMSKeyId tells the driver to encrypt the object using the specified KMS Key.. 
[optional] # noqa: E501 - server_side_customer_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1S3EncryptionOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - enable_encryption (bool): EnableEncryption tells the driver to encrypt objects if set to true. If kmsKeyId and serverSideCustomerKeySecret are not set, SSE-S3 will be used. [optional] # noqa: E501 - kms_encryption_context (str): KmsEncryptionContext is a json blob that contains an encryption context. See https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context for more information. [optional] # noqa: E501 - kms_key_id (str): KMSKeyId tells the driver to encrypt the object using the specified KMS Key.. 
[optional] # noqa: E501 - server_side_customer_key_secret (SecretKeySelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py deleted file mode 100644 index 705f283bf03d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_script_template.py +++ /dev/null @@ -1,373 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.container_port import ContainerPort - from argo_workflows.model.env_from_source import EnvFromSource - from argo_workflows.model.env_var import EnvVar - from argo_workflows.model.lifecycle import Lifecycle - from argo_workflows.model.probe import Probe - from argo_workflows.model.resource_requirements import ResourceRequirements - from argo_workflows.model.security_context import SecurityContext - from argo_workflows.model.volume_device import VolumeDevice - from argo_workflows.model.volume_mount import VolumeMount - globals()['ContainerPort'] = ContainerPort - globals()['EnvFromSource'] = EnvFromSource - globals()['EnvVar'] = EnvVar - globals()['Lifecycle'] = Lifecycle - globals()['Probe'] = Probe - globals()['ResourceRequirements'] = ResourceRequirements - globals()['SecurityContext'] = SecurityContext - globals()['VolumeDevice'] = VolumeDevice - globals()['VolumeMount'] = VolumeMount - - -class IoArgoprojWorkflowV1alpha1ScriptTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'image': (str,), # noqa: E501 - 'source': (str,), # noqa: E501 - 'args': ([str],), # noqa: E501 - 'command': ([str],), # noqa: E501 - 'env': ([EnvVar],), # noqa: E501 - 'env_from': ([EnvFromSource],), # noqa: E501 - 'image_pull_policy': (str,), # noqa: E501 - 'lifecycle': (Lifecycle,), # noqa: E501 - 'liveness_probe': (Probe,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'ports': ([ContainerPort],), # noqa: E501 - 'readiness_probe': (Probe,), # noqa: E501 - 'resources': (ResourceRequirements,), # noqa: E501 - 'security_context': (SecurityContext,), # noqa: E501 - 'startup_probe': (Probe,), # noqa: E501 - 'stdin': (bool,), # noqa: E501 - 'stdin_once': (bool,), # noqa: E501 - 'termination_message_path': (str,), # noqa: E501 - 'termination_message_policy': (str,), # noqa: E501 - 'tty': (bool,), # noqa: E501 - 'volume_devices': ([VolumeDevice],), # noqa: E501 - 'volume_mounts': ([VolumeMount],), # noqa: E501 - 'working_dir': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'image': 'image', # noqa: E501 - 'source': 'source', # noqa: E501 - 'args': 'args', # noqa: E501 - 'command': 'command', # noqa: E501 - 'env': 'env', # noqa: E501 - 'env_from': 'envFrom', # noqa: E501 - 'image_pull_policy': 'imagePullPolicy', # noqa: E501 - 'lifecycle': 'lifecycle', # noqa: E501 - 'liveness_probe': 'livenessProbe', # noqa: E501 - 'name': 'name', # noqa: E501 - 'ports': 'ports', # noqa: E501 - 'readiness_probe': 'readinessProbe', # noqa: E501 - 'resources': 'resources', # noqa: E501 - 'security_context': 'securityContext', # noqa: E501 - 'startup_probe': 'startupProbe', # noqa: E501 - 'stdin': 'stdin', # noqa: E501 - 'stdin_once': 'stdinOnce', # noqa: E501 - 'termination_message_path': 'terminationMessagePath', # noqa: E501 - 'termination_message_policy': 'terminationMessagePolicy', # noqa: E501 - 'tty': 'tty', # noqa: E501 - 'volume_devices': 'volumeDevices', # noqa: E501 - 
'volume_mounts': 'volumeMounts', # noqa: E501 - 'working_dir': 'workingDir', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, image, source, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ScriptTemplate - a model defined in OpenAPI - - Args: - image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. - source (str): Source contains the source code of the script to execute - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. 
When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.. [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.. [optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. 
If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. [optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. [optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.image = image - self.source = source - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, image, source, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ScriptTemplate - a model defined in OpenAPI - - Args: - image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. 
- source (str): Source contains the source code of the script to execute - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". 
Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. 
[optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.. [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.. [optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. [optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. 
Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. [optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.image = image - self.source = source - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_holding.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_holding.py deleted file mode 100644 index f3f75ce5c515..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_holding.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1SemaphoreHolding(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'holders': ([str],), # noqa: E501 - 'semaphore': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'holders': 'holders', # noqa: E501 - 'semaphore': 'semaphore', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SemaphoreHolding - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holders ([str]): Holders stores the list of current holder names in the io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - semaphore (str): Semaphore stores the semaphore name.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SemaphoreHolding - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holders ([str]): Holders stores the list of current holder names in the io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - semaphore (str): Semaphore stores the semaphore name.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_ref.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_ref.py deleted file mode 100644 index 0d382bc48a74..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_ref.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - - -class IoArgoprojWorkflowV1alpha1SemaphoreRef(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'config_map_key_ref': (ConfigMapKeySelector,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map_key_ref': 'configMapKeyRef', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SemaphoreRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - namespace (str): Namespace is the namespace of the configmap, default: [namespace of workflow]. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SemaphoreRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - namespace (str): Namespace is the namespace of the configmap, default: [namespace of workflow]. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_status.py deleted file mode 100644 index f68e7856de0c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_semaphore_status.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_semaphore_holding import IoArgoprojWorkflowV1alpha1SemaphoreHolding - globals()['IoArgoprojWorkflowV1alpha1SemaphoreHolding'] = IoArgoprojWorkflowV1alpha1SemaphoreHolding - - -class IoArgoprojWorkflowV1alpha1SemaphoreStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'holding': ([IoArgoprojWorkflowV1alpha1SemaphoreHolding],), # noqa: E501 - 'waiting': ([IoArgoprojWorkflowV1alpha1SemaphoreHolding],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'holding': 'holding', # noqa: E501 - 'waiting': 'waiting', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SemaphoreStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holding ([IoArgoprojWorkflowV1alpha1SemaphoreHolding]): Holding stores the list of resource acquired synchronization lock for workflows.. [optional] # noqa: E501 - waiting ([IoArgoprojWorkflowV1alpha1SemaphoreHolding]): Waiting indicates the list of current synchronization lock holders.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SemaphoreStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - holding ([IoArgoprojWorkflowV1alpha1SemaphoreHolding]): Holding stores the list of resource acquired synchronization lock for workflows.. [optional] # noqa: E501 - waiting ([IoArgoprojWorkflowV1alpha1SemaphoreHolding]): Waiting indicates the list of current synchronization lock holders.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_sequence.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_sequence.py deleted file mode 100644 index 0c2dbadc0fb4..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_sequence.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Sequence(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'count': (str,), # noqa: E501 - 'end': (str,), # noqa: E501 - 'format': (str,), # noqa: E501 - 'start': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'count': 'count', # noqa: E501 - 'end': 'end', # noqa: E501 - 'format': 'format', # noqa: E501 - 'start': 'start', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Sequence - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - count (str): [optional] # noqa: E501 - end (str): [optional] # noqa: E501 - format (str): Format is a printf format string to format the value in the sequence. [optional] # noqa: E501 - start (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Sequence - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - count (str): [optional] # noqa: E501 - end (str): [optional] # noqa: E501 - format (str): Format is a printf format string to format the value in the sequence. [optional] # noqa: E501 - start (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_stop_strategy.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_stop_strategy.py deleted file mode 100644 index e0cbe9b6d5b7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_stop_strategy.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1StopStrategy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'condition': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'condition': 'condition', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, condition, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1StopStrategy - a model defined in OpenAPI - - Args: - condition (str): Condition defines a condition that stops scheduling workflows when evaluates to true. Use the keywords `failed` or `succeeded` to access the number of failed or successful child workflows. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.condition = condition - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, condition, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1StopStrategy - a model defined in OpenAPI - - Args: - condition (str): Condition defines a condition that stops scheduling workflows when evaluates to true. Use the keywords `failed` or `succeeded` to access the number of failed or successful child workflows. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.condition = condition - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit.py deleted file mode 100644 index b1a1401b0945..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojWorkflowV1alpha1Arguments'] = IoArgoprojWorkflowV1alpha1Arguments - globals()['IoArgoprojWorkflowV1alpha1WorkflowTemplateRef'] = IoArgoprojWorkflowV1alpha1WorkflowTemplateRef - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojWorkflowV1alpha1Submit(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'workflow_template_ref': (IoArgoprojWorkflowV1alpha1WorkflowTemplateRef,), # noqa: E501 - 'arguments': (IoArgoprojWorkflowV1alpha1Arguments,), # noqa: E501 - 'metadata': (ObjectMeta,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'workflow_template_ref': 'workflowTemplateRef', # noqa: E501 - 'arguments': 'arguments', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, workflow_template_ref, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Submit - a model defined in OpenAPI - - Args: - workflow_template_ref (IoArgoprojWorkflowV1alpha1WorkflowTemplateRef): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - metadata (ObjectMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.workflow_template_ref = workflow_template_ref - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, workflow_template_ref, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Submit - a model defined in OpenAPI - - Args: - workflow_template_ref (IoArgoprojWorkflowV1alpha1WorkflowTemplateRef): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - metadata (ObjectMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.workflow_template_ref = workflow_template_ref - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit_opts.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit_opts.py deleted file mode 100644 index a36addde4cd6..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_submit_opts.py +++ /dev/null @@ -1,305 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.owner_reference import OwnerReference - globals()['OwnerReference'] = OwnerReference - - -class IoArgoprojWorkflowV1alpha1SubmitOpts(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'annotations': (str,), # noqa: E501 - 'dry_run': (bool,), # noqa: E501 - 'entry_point': (str,), # noqa: E501 - 'generate_name': (str,), # noqa: E501 - 'labels': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'owner_reference': (OwnerReference,), # noqa: E501 - 'parameters': ([str],), # noqa: E501 - 'pod_priority_class_name': (str,), # noqa: E501 - 'priority': (int,), # noqa: E501 - 'server_dry_run': (bool,), # noqa: E501 - 'service_account': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'annotations': 'annotations', # noqa: E501 - 'dry_run': 'dryRun', # noqa: E501 - 'entry_point': 'entryPoint', # noqa: E501 - 'generate_name': 'generateName', # noqa: E501 - 'labels': 'labels', # noqa: E501 - 'name': 'name', # noqa: E501 - 'owner_reference': 'ownerReference', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'pod_priority_class_name': 'podPriorityClassName', # noqa: E501 - 'priority': 'priority', # noqa: E501 - 'server_dry_run': 'serverDryRun', # noqa: E501 - 'service_account': 'serviceAccount', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SubmitOpts - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations (str): Annotations adds to metadata.labels. [optional] # noqa: E501 - dry_run (bool): DryRun validates the workflow on the client-side without creating it. This option is not supported in API. [optional] # noqa: E501 - entry_point (str): Entrypoint overrides spec.entrypoint. [optional] # noqa: E501 - generate_name (str): GenerateName overrides metadata.generateName. [optional] # noqa: E501 - labels (str): Labels adds to metadata.labels. [optional] # noqa: E501 - name (str): Name overrides metadata.name. [optional] # noqa: E501 - owner_reference (OwnerReference): [optional] # noqa: E501 - parameters ([str]): Parameters passes input parameters to workflow. [optional] # noqa: E501 - pod_priority_class_name (str): Set the podPriorityClassName of the workflow. [optional] # noqa: E501 - priority (int): Priority is used if controller is configured to process limited number of workflows in parallel, higher priority workflows are processed first.. [optional] # noqa: E501 - server_dry_run (bool): ServerDryRun validates the workflow on the server-side without creating it. 
[optional] # noqa: E501 - service_account (str): ServiceAccount runs all pods in the workflow using specified ServiceAccount.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SubmitOpts - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations (str): Annotations adds to metadata.labels. [optional] # noqa: E501 - dry_run (bool): DryRun validates the workflow on the client-side without creating it. This option is not supported in API. [optional] # noqa: E501 - entry_point (str): Entrypoint overrides spec.entrypoint. [optional] # noqa: E501 - generate_name (str): GenerateName overrides metadata.generateName. [optional] # noqa: E501 - labels (str): Labels adds to metadata.labels. [optional] # noqa: E501 - name (str): Name overrides metadata.name. [optional] # noqa: E501 - owner_reference (OwnerReference): [optional] # noqa: E501 - parameters ([str]): Parameters passes input parameters to workflow. 
[optional] # noqa: E501 - pod_priority_class_name (str): Set the podPriorityClassName of the workflow. [optional] # noqa: E501 - priority (int): Priority is used if controller is configured to process limited number of workflows in parallel, higher priority workflows are processed first.. [optional] # noqa: E501 - server_dry_run (bool): ServerDryRun validates the workflow on the server-side without creating it. [optional] # noqa: E501 - service_account (str): ServiceAccount runs all pods in the workflow using specified ServiceAccount.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_suspend_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_suspend_template.py deleted file mode 100644 index 2215751d3974..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_suspend_template.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1SuspendTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'duration': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'duration': 'duration', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SuspendTemplate - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): Duration is the seconds to wait before automatically resuming a template. Must be a string. Default unit is seconds. Could also be a Duration, e.g.: \"2m\", \"6h\". [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SuspendTemplate - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - duration (str): Duration is the seconds to wait before automatically resuming a template. Must be a string. Default unit is seconds. Could also be a Duration, e.g.: \"2m\", \"6h\". [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization.py deleted file mode 100644 index 3c3a131cbb58..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_mutex import IoArgoprojWorkflowV1alpha1Mutex - from argo_workflows.model.io_argoproj_workflow_v1alpha1_semaphore_ref import IoArgoprojWorkflowV1alpha1SemaphoreRef - globals()['IoArgoprojWorkflowV1alpha1Mutex'] = IoArgoprojWorkflowV1alpha1Mutex - globals()['IoArgoprojWorkflowV1alpha1SemaphoreRef'] = IoArgoprojWorkflowV1alpha1SemaphoreRef - - -class IoArgoprojWorkflowV1alpha1Synchronization(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'mutex': (IoArgoprojWorkflowV1alpha1Mutex,), # noqa: E501 - 'semaphore': (IoArgoprojWorkflowV1alpha1SemaphoreRef,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'mutex': 'mutex', # noqa: E501 - 'semaphore': 'semaphore', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Synchronization - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mutex (IoArgoprojWorkflowV1alpha1Mutex): [optional] # noqa: E501 - semaphore (IoArgoprojWorkflowV1alpha1SemaphoreRef): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Synchronization - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mutex (IoArgoprojWorkflowV1alpha1Mutex): [optional] # noqa: E501 - semaphore (IoArgoprojWorkflowV1alpha1SemaphoreRef): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization_status.py deleted file mode 100644 index e3c26a871829..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization_status.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_mutex_status import IoArgoprojWorkflowV1alpha1MutexStatus - from argo_workflows.model.io_argoproj_workflow_v1alpha1_semaphore_status import IoArgoprojWorkflowV1alpha1SemaphoreStatus - globals()['IoArgoprojWorkflowV1alpha1MutexStatus'] = IoArgoprojWorkflowV1alpha1MutexStatus - globals()['IoArgoprojWorkflowV1alpha1SemaphoreStatus'] = IoArgoprojWorkflowV1alpha1SemaphoreStatus - - -class IoArgoprojWorkflowV1alpha1SynchronizationStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'mutex': (IoArgoprojWorkflowV1alpha1MutexStatus,), # noqa: E501 - 'semaphore': (IoArgoprojWorkflowV1alpha1SemaphoreStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'mutex': 'mutex', # noqa: E501 - 'semaphore': 'semaphore', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SynchronizationStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mutex (IoArgoprojWorkflowV1alpha1MutexStatus): [optional] # noqa: E501 - semaphore (IoArgoprojWorkflowV1alpha1SemaphoreStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1SynchronizationStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mutex (IoArgoprojWorkflowV1alpha1MutexStatus): [optional] # noqa: E501 - semaphore (IoArgoprojWorkflowV1alpha1SemaphoreStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_tar_strategy.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_tar_strategy.py deleted file mode 100644 index d98eb3b97e46..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_tar_strategy.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1TarStrategy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'compression_level': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'compression_level': 'compressionLevel', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TarStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - compression_level (int): CompressionLevel specifies the gzip compression level to use for the artifact. Defaults to gzip.DefaultCompression.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TarStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - compression_level (int): CompressionLevel specifies the gzip compression level to use for the artifact. Defaults to gzip.DefaultCompression.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template.py deleted file mode 100644 index 13d05724dd34..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template.py +++ /dev/null @@ -1,459 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.affinity import Affinity - from argo_workflows.model.container import Container - from argo_workflows.model.host_alias import HostAlias - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation - from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_set_template import IoArgoprojWorkflowV1alpha1ContainerSetTemplate - from argo_workflows.model.io_argoproj_workflow_v1alpha1_dag_template import IoArgoprojWorkflowV1alpha1DAGTemplate - from argo_workflows.model.io_argoproj_workflow_v1alpha1_data import IoArgoprojWorkflowV1alpha1Data - from argo_workflows.model.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig - from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_http import IoArgoprojWorkflowV1alpha1HTTP - from argo_workflows.model.io_argoproj_workflow_v1alpha1_inputs import IoArgoprojWorkflowV1alpha1Inputs - from argo_workflows.model.io_argoproj_workflow_v1alpha1_memoize import IoArgoprojWorkflowV1alpha1Memoize - from argo_workflows.model.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata - from argo_workflows.model.io_argoproj_workflow_v1alpha1_metrics import IoArgoprojWorkflowV1alpha1Metrics - from argo_workflows.model.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs - from argo_workflows.model.io_argoproj_workflow_v1alpha1_parallel_steps import IoArgoprojWorkflowV1alpha1ParallelSteps - from argo_workflows.model.io_argoproj_workflow_v1alpha1_resource_template import IoArgoprojWorkflowV1alpha1ResourceTemplate - from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy - from argo_workflows.model.io_argoproj_workflow_v1alpha1_script_template import IoArgoprojWorkflowV1alpha1ScriptTemplate - from argo_workflows.model.io_argoproj_workflow_v1alpha1_suspend_template import IoArgoprojWorkflowV1alpha1SuspendTemplate - from argo_workflows.model.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization - from argo_workflows.model.io_argoproj_workflow_v1alpha1_user_container import IoArgoprojWorkflowV1alpha1UserContainer - from argo_workflows.model.pod_security_context import PodSecurityContext - from argo_workflows.model.toleration import Toleration - from argo_workflows.model.volume import Volume - globals()['Affinity'] = Affinity - globals()['Container'] = Container - globals()['HostAlias'] = HostAlias - globals()['IoArgoprojWorkflowV1alpha1ArtifactLocation'] = IoArgoprojWorkflowV1alpha1ArtifactLocation - globals()['IoArgoprojWorkflowV1alpha1ContainerSetTemplate'] = IoArgoprojWorkflowV1alpha1ContainerSetTemplate - 
globals()['IoArgoprojWorkflowV1alpha1DAGTemplate'] = IoArgoprojWorkflowV1alpha1DAGTemplate - globals()['IoArgoprojWorkflowV1alpha1Data'] = IoArgoprojWorkflowV1alpha1Data - globals()['IoArgoprojWorkflowV1alpha1ExecutorConfig'] = IoArgoprojWorkflowV1alpha1ExecutorConfig - globals()['IoArgoprojWorkflowV1alpha1HTTP'] = IoArgoprojWorkflowV1alpha1HTTP - globals()['IoArgoprojWorkflowV1alpha1Inputs'] = IoArgoprojWorkflowV1alpha1Inputs - globals()['IoArgoprojWorkflowV1alpha1Memoize'] = IoArgoprojWorkflowV1alpha1Memoize - globals()['IoArgoprojWorkflowV1alpha1Metadata'] = IoArgoprojWorkflowV1alpha1Metadata - globals()['IoArgoprojWorkflowV1alpha1Metrics'] = IoArgoprojWorkflowV1alpha1Metrics - globals()['IoArgoprojWorkflowV1alpha1Outputs'] = IoArgoprojWorkflowV1alpha1Outputs - globals()['IoArgoprojWorkflowV1alpha1ParallelSteps'] = IoArgoprojWorkflowV1alpha1ParallelSteps - globals()['IoArgoprojWorkflowV1alpha1ResourceTemplate'] = IoArgoprojWorkflowV1alpha1ResourceTemplate - globals()['IoArgoprojWorkflowV1alpha1RetryStrategy'] = IoArgoprojWorkflowV1alpha1RetryStrategy - globals()['IoArgoprojWorkflowV1alpha1ScriptTemplate'] = IoArgoprojWorkflowV1alpha1ScriptTemplate - globals()['IoArgoprojWorkflowV1alpha1SuspendTemplate'] = IoArgoprojWorkflowV1alpha1SuspendTemplate - globals()['IoArgoprojWorkflowV1alpha1Synchronization'] = IoArgoprojWorkflowV1alpha1Synchronization - globals()['IoArgoprojWorkflowV1alpha1UserContainer'] = IoArgoprojWorkflowV1alpha1UserContainer - globals()['PodSecurityContext'] = PodSecurityContext - globals()['Toleration'] = Toleration - globals()['Volume'] = Volume - - -class IoArgoprojWorkflowV1alpha1Template(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'active_deadline_seconds': (str,), # noqa: E501 - 'affinity': (Affinity,), # noqa: E501 - 'archive_location': (IoArgoprojWorkflowV1alpha1ArtifactLocation,), # noqa: E501 - 'automount_service_account_token': (bool,), # noqa: E501 - 'container': (Container,), # noqa: E501 - 'container_set': (IoArgoprojWorkflowV1alpha1ContainerSetTemplate,), # noqa: E501 - 'daemon': (bool,), # noqa: E501 - 'dag': (IoArgoprojWorkflowV1alpha1DAGTemplate,), # noqa: E501 - 'data': (IoArgoprojWorkflowV1alpha1Data,), # noqa: E501 - 'executor': (IoArgoprojWorkflowV1alpha1ExecutorConfig,), # noqa: E501 - 'fail_fast': (bool,), # noqa: E501 - 'host_aliases': ([HostAlias],), # noqa: E501 - 'http': (IoArgoprojWorkflowV1alpha1HTTP,), # noqa: E501 - 'init_containers': ([IoArgoprojWorkflowV1alpha1UserContainer],), # noqa: E501 - 'inputs': (IoArgoprojWorkflowV1alpha1Inputs,), # noqa: E501 - 'memoize': (IoArgoprojWorkflowV1alpha1Memoize,), # noqa: E501 - 'metadata': (IoArgoprojWorkflowV1alpha1Metadata,), # noqa: E501 - 'metrics': (IoArgoprojWorkflowV1alpha1Metrics,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'node_selector': ({str: (str,)},), # noqa: E501 - 'outputs': (IoArgoprojWorkflowV1alpha1Outputs,), # noqa: E501 - 'parallelism': (int,), # noqa: E501 - 'plugin': (bool, date, datetime, dict, float, int, list, str, none_type,), # noqa: E501 - 'pod_spec_patch': (str,), # noqa: E501 - 'priority': (int,), # noqa: E501 - 'priority_class_name': (str,), # noqa: E501 - 'resource': (IoArgoprojWorkflowV1alpha1ResourceTemplate,), # noqa: E501 - 'retry_strategy': (IoArgoprojWorkflowV1alpha1RetryStrategy,), # noqa: E501 - 'scheduler_name': (str,), # noqa: E501 - 'script': (IoArgoprojWorkflowV1alpha1ScriptTemplate,), # noqa: E501 - 'security_context': (PodSecurityContext,), # noqa: E501 - 'service_account_name': (str,), # noqa: E501 - 'sidecars': ([IoArgoprojWorkflowV1alpha1UserContainer],), # noqa: E501 - 'steps': ([IoArgoprojWorkflowV1alpha1ParallelSteps],), # noqa: E501 
- 'suspend': (IoArgoprojWorkflowV1alpha1SuspendTemplate,), # noqa: E501 - 'synchronization': (IoArgoprojWorkflowV1alpha1Synchronization,), # noqa: E501 - 'timeout': (str,), # noqa: E501 - 'tolerations': ([Toleration],), # noqa: E501 - 'volumes': ([Volume],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'active_deadline_seconds': 'activeDeadlineSeconds', # noqa: E501 - 'affinity': 'affinity', # noqa: E501 - 'archive_location': 'archiveLocation', # noqa: E501 - 'automount_service_account_token': 'automountServiceAccountToken', # noqa: E501 - 'container': 'container', # noqa: E501 - 'container_set': 'containerSet', # noqa: E501 - 'daemon': 'daemon', # noqa: E501 - 'dag': 'dag', # noqa: E501 - 'data': 'data', # noqa: E501 - 'executor': 'executor', # noqa: E501 - 'fail_fast': 'failFast', # noqa: E501 - 'host_aliases': 'hostAliases', # noqa: E501 - 'http': 'http', # noqa: E501 - 'init_containers': 'initContainers', # noqa: E501 - 'inputs': 'inputs', # noqa: E501 - 'memoize': 'memoize', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'metrics': 'metrics', # noqa: E501 - 'name': 'name', # noqa: E501 - 'node_selector': 'nodeSelector', # noqa: E501 - 'outputs': 'outputs', # noqa: E501 - 'parallelism': 'parallelism', # noqa: E501 - 'plugin': 'plugin', # noqa: E501 - 'pod_spec_patch': 'podSpecPatch', # noqa: E501 - 'priority': 'priority', # noqa: E501 - 'priority_class_name': 'priorityClassName', # noqa: E501 - 'resource': 'resource', # noqa: E501 - 'retry_strategy': 'retryStrategy', # noqa: E501 - 'scheduler_name': 'schedulerName', # noqa: E501 - 'script': 'script', # noqa: E501 - 'security_context': 'securityContext', # noqa: E501 - 'service_account_name': 'serviceAccountName', # noqa: E501 - 'sidecars': 'sidecars', # noqa: E501 - 'steps': 'steps', # noqa: E501 - 'suspend': 'suspend', # noqa: E501 - 'synchronization': 'synchronization', # noqa: E501 - 'timeout': 'timeout', # noqa: E501 - 'tolerations': 
'tolerations', # noqa: E501 - 'volumes': 'volumes', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Template - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - active_deadline_seconds (str): [optional] # noqa: E501 - affinity (Affinity): [optional] # noqa: E501 - archive_location (IoArgoprojWorkflowV1alpha1ArtifactLocation): [optional] # noqa: E501 - automount_service_account_token (bool): AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false.. [optional] # noqa: E501 - container (Container): [optional] # noqa: E501 - container_set (IoArgoprojWorkflowV1alpha1ContainerSetTemplate): [optional] # noqa: E501 - daemon (bool): Daemon will allow a workflow to proceed to the next step so long as the container reaches readiness. [optional] # noqa: E501 - dag (IoArgoprojWorkflowV1alpha1DAGTemplate): [optional] # noqa: E501 - data (IoArgoprojWorkflowV1alpha1Data): [optional] # noqa: E501 - executor (IoArgoprojWorkflowV1alpha1ExecutorConfig): [optional] # noqa: E501 - fail_fast (bool): FailFast, if specified, will fail this template if any of its child pods has failed. This is useful for when this template is expanded with `withItems`, etc.. [optional] # noqa: E501 - host_aliases ([HostAlias]): HostAliases is an optional list of hosts and IPs that will be injected into the pod spec. [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTP): [optional] # noqa: E501 - init_containers ([IoArgoprojWorkflowV1alpha1UserContainer]): InitContainers is a list of containers which run before the main container.. 
[optional] # noqa: E501 - inputs (IoArgoprojWorkflowV1alpha1Inputs): [optional] # noqa: E501 - memoize (IoArgoprojWorkflowV1alpha1Memoize): [optional] # noqa: E501 - metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - metrics (IoArgoprojWorkflowV1alpha1Metrics): [optional] # noqa: E501 - name (str): Name is the name of the template. [optional] # noqa: E501 - node_selector ({str: (str,)}): NodeSelector is a selector to schedule this step of the workflow to be run on the selected node(s). Overrides the selector set at the workflow level.. [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - parallelism (int): Parallelism limits the max total parallel pods that can execute at the same time within the boundaries of this template invocation. If additional steps/dag templates are invoked, the pods created by those templates will not be counted towards this total.. [optional] # noqa: E501 - plugin (bool, date, datetime, dict, float, int, list, str, none_type): Plugin is an Object with exactly one key. [optional] # noqa: E501 - pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).. [optional] # noqa: E501 - priority (int): Priority to apply to workflow pods.. [optional] # noqa: E501 - priority_class_name (str): PriorityClassName to apply to workflow pods.. [optional] # noqa: E501 - resource (IoArgoprojWorkflowV1alpha1ResourceTemplate): [optional] # noqa: E501 - retry_strategy (IoArgoprojWorkflowV1alpha1RetryStrategy): [optional] # noqa: E501 - scheduler_name (str): If specified, the pod will be dispatched by specified scheduler. Or it will be dispatched by workflow scope scheduler if specified. If neither specified, the pod will be dispatched by default scheduler.. 
[optional] # noqa: E501 - script (IoArgoprojWorkflowV1alpha1ScriptTemplate): [optional] # noqa: E501 - security_context (PodSecurityContext): [optional] # noqa: E501 - service_account_name (str): ServiceAccountName to apply to workflow pods. [optional] # noqa: E501 - sidecars ([IoArgoprojWorkflowV1alpha1UserContainer]): Sidecars is a list of containers which run alongside the main container Sidecars are automatically killed when the main container completes. [optional] # noqa: E501 - steps ([IoArgoprojWorkflowV1alpha1ParallelSteps]): Steps define a series of sequential/parallel workflow steps. [optional] # noqa: E501 - suspend (IoArgoprojWorkflowV1alpha1SuspendTemplate): [optional] # noqa: E501 - synchronization (IoArgoprojWorkflowV1alpha1Synchronization): [optional] # noqa: E501 - timeout (str): Timeout allows to set the total node execution timeout duration counting from the node's start time. This duration also includes time in which the node spends in Pending state. This duration may not be applied to Step or DAG templates.. [optional] # noqa: E501 - tolerations ([Toleration]): Tolerations to apply to workflow pods.. [optional] # noqa: E501 - volumes ([Volume]): Volumes is a list of volumes that can be mounted by containers in a template.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Template - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - active_deadline_seconds (str): [optional] # noqa: E501 - affinity (Affinity): [optional] # noqa: E501 - archive_location (IoArgoprojWorkflowV1alpha1ArtifactLocation): [optional] # noqa: E501 - automount_service_account_token (bool): AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false.. [optional] # noqa: E501 - container (Container): [optional] # noqa: E501 - container_set (IoArgoprojWorkflowV1alpha1ContainerSetTemplate): [optional] # noqa: E501 - daemon (bool): Daemon will allow a workflow to proceed to the next step so long as the container reaches readiness. [optional] # noqa: E501 - dag (IoArgoprojWorkflowV1alpha1DAGTemplate): [optional] # noqa: E501 - data (IoArgoprojWorkflowV1alpha1Data): [optional] # noqa: E501 - executor (IoArgoprojWorkflowV1alpha1ExecutorConfig): [optional] # noqa: E501 - fail_fast (bool): FailFast, if specified, will fail this template if any of its child pods has failed. This is useful for when this template is expanded with `withItems`, etc.. [optional] # noqa: E501 - host_aliases ([HostAlias]): HostAliases is an optional list of hosts and IPs that will be injected into the pod spec. [optional] # noqa: E501 - http (IoArgoprojWorkflowV1alpha1HTTP): [optional] # noqa: E501 - init_containers ([IoArgoprojWorkflowV1alpha1UserContainer]): InitContainers is a list of containers which run before the main container.. 
[optional] # noqa: E501 - inputs (IoArgoprojWorkflowV1alpha1Inputs): [optional] # noqa: E501 - memoize (IoArgoprojWorkflowV1alpha1Memoize): [optional] # noqa: E501 - metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - metrics (IoArgoprojWorkflowV1alpha1Metrics): [optional] # noqa: E501 - name (str): Name is the name of the template. [optional] # noqa: E501 - node_selector ({str: (str,)}): NodeSelector is a selector to schedule this step of the workflow to be run on the selected node(s). Overrides the selector set at the workflow level.. [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - parallelism (int): Parallelism limits the max total parallel pods that can execute at the same time within the boundaries of this template invocation. If additional steps/dag templates are invoked, the pods created by those templates will not be counted towards this total.. [optional] # noqa: E501 - plugin (bool, date, datetime, dict, float, int, list, str, none_type): Plugin is an Object with exactly one key. [optional] # noqa: E501 - pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).. [optional] # noqa: E501 - priority (int): Priority to apply to workflow pods.. [optional] # noqa: E501 - priority_class_name (str): PriorityClassName to apply to workflow pods.. [optional] # noqa: E501 - resource (IoArgoprojWorkflowV1alpha1ResourceTemplate): [optional] # noqa: E501 - retry_strategy (IoArgoprojWorkflowV1alpha1RetryStrategy): [optional] # noqa: E501 - scheduler_name (str): If specified, the pod will be dispatched by specified scheduler. Or it will be dispatched by workflow scope scheduler if specified. If neither specified, the pod will be dispatched by default scheduler.. 
[optional] # noqa: E501 - script (IoArgoprojWorkflowV1alpha1ScriptTemplate): [optional] # noqa: E501 - security_context (PodSecurityContext): [optional] # noqa: E501 - service_account_name (str): ServiceAccountName to apply to workflow pods. [optional] # noqa: E501 - sidecars ([IoArgoprojWorkflowV1alpha1UserContainer]): Sidecars is a list of containers which run alongside the main container Sidecars are automatically killed when the main container completes. [optional] # noqa: E501 - steps ([IoArgoprojWorkflowV1alpha1ParallelSteps]): Steps define a series of sequential/parallel workflow steps. [optional] # noqa: E501 - suspend (IoArgoprojWorkflowV1alpha1SuspendTemplate): [optional] # noqa: E501 - synchronization (IoArgoprojWorkflowV1alpha1Synchronization): [optional] # noqa: E501 - timeout (str): Timeout allows to set the total node execution timeout duration counting from the node's start time. This duration also includes time in which the node spends in Pending state. This duration may not be applied to Step or DAG templates.. [optional] # noqa: E501 - tolerations ([Toleration]): Tolerations to apply to workflow pods.. [optional] # noqa: E501 - volumes ([Volume]): Volumes is a list of volumes that can be mounted by containers in a template.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template_ref.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template_ref.py deleted file mode 100644 index 3ae5eab1c725..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_template_ref.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1TemplateRef(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'cluster_scope': (bool,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'template': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cluster_scope': 'clusterScope', # noqa: E501 - 'name': 'name', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TemplateRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cluster_scope (bool): ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate).. [optional] # noqa: E501 - name (str): Name is the resource name of the template.. [optional] # noqa: E501 - template (str): Template is the name of referred template in the resource.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TemplateRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cluster_scope (bool): ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate).. [optional] # noqa: E501 - name (str): Name is the resource name of the template.. [optional] # noqa: E501 - template (str): Template is the name of referred template in the resource.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_transformation_step.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_transformation_step.py deleted file mode 100644 index c7f1730f4923..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_transformation_step.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1TransformationStep(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'expression': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'expression': 'expression', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, expression, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TransformationStep - a model defined in OpenAPI - - Args: - expression (str): Expression defines an expr expression to apply - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.expression = expression - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, expression, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TransformationStep - a model defined in OpenAPI - - Args: - expression (str): Expression defines an expr expression to apply - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.expression = expression - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_ttl_strategy.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_ttl_strategy.py deleted file mode 100644 index 5304b35f9ef3..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_ttl_strategy.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1TTLStrategy(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'seconds_after_completion': (int,), # noqa: E501 - 'seconds_after_failure': (int,), # noqa: E501 - 'seconds_after_success': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'seconds_after_completion': 'secondsAfterCompletion', # noqa: E501 - 'seconds_after_failure': 'secondsAfterFailure', # noqa: E501 - 'seconds_after_success': 'secondsAfterSuccess', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TTLStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - seconds_after_completion (int): SecondsAfterCompletion is the number of seconds to live after completion. [optional] # noqa: E501 - seconds_after_failure (int): SecondsAfterFailure is the number of seconds to live after failure. [optional] # noqa: E501 - seconds_after_success (int): SecondsAfterSuccess is the number of seconds to live after success. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1TTLStrategy - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - seconds_after_completion (int): SecondsAfterCompletion is the number of seconds to live after completion. [optional] # noqa: E501 - seconds_after_failure (int): SecondsAfterFailure is the number of seconds to live after failure. [optional] # noqa: E501 - seconds_after_success (int): SecondsAfterSuccess is the number of seconds to live after success. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py deleted file mode 100644 index cf698ecd3649..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow - globals()['IoArgoprojWorkflowV1alpha1CronWorkflow'] = IoArgoprojWorkflowV1alpha1CronWorkflow - - -class IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'cron_workflow': (IoArgoprojWorkflowV1alpha1CronWorkflow,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cron_workflow': 'cronWorkflow', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron_workflow (IoArgoprojWorkflowV1alpha1CronWorkflow): [optional] # noqa: E501 - name (str): DEPRECATED: This field is ignored.. [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cron_workflow (IoArgoprojWorkflowV1alpha1CronWorkflow): [optional] # noqa: E501 - name (str): DEPRECATED: This field is ignored.. [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py deleted file mode 100644 index e34e9a108e5b..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_user_container.py +++ /dev/null @@ -1,371 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.container_port import ContainerPort - from argo_workflows.model.env_from_source import EnvFromSource - from argo_workflows.model.env_var import EnvVar - from argo_workflows.model.lifecycle import Lifecycle - from argo_workflows.model.probe import Probe - from argo_workflows.model.resource_requirements import ResourceRequirements - from argo_workflows.model.security_context import SecurityContext - from argo_workflows.model.volume_device import VolumeDevice - from argo_workflows.model.volume_mount import VolumeMount - globals()['ContainerPort'] = ContainerPort - globals()['EnvFromSource'] = EnvFromSource - globals()['EnvVar'] = EnvVar - globals()['Lifecycle'] = Lifecycle - globals()['Probe'] = Probe - globals()['ResourceRequirements'] = 
ResourceRequirements - globals()['SecurityContext'] = SecurityContext - globals()['VolumeDevice'] = VolumeDevice - globals()['VolumeMount'] = VolumeMount - - -class IoArgoprojWorkflowV1alpha1UserContainer(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'args': ([str],), # noqa: E501 - 'command': ([str],), # noqa: E501 - 'env': ([EnvVar],), # noqa: E501 - 'env_from': ([EnvFromSource],), # noqa: E501 - 'image': (str,), # noqa: E501 - 'image_pull_policy': (str,), # noqa: E501 - 'lifecycle': (Lifecycle,), # noqa: E501 - 'liveness_probe': (Probe,), # noqa: E501 - 'mirror_volume_mounts': (bool,), # noqa: E501 - 'ports': ([ContainerPort],), # noqa: E501 - 'readiness_probe': (Probe,), # noqa: E501 - 'resources': (ResourceRequirements,), # noqa: E501 - 'security_context': (SecurityContext,), # noqa: E501 - 'startup_probe': (Probe,), # noqa: E501 - 'stdin': (bool,), # noqa: E501 - 'stdin_once': (bool,), # noqa: E501 - 'termination_message_path': (str,), # noqa: E501 - 'termination_message_policy': (str,), # noqa: E501 - 'tty': (bool,), # noqa: E501 - 'volume_devices': ([VolumeDevice],), # noqa: E501 - 'volume_mounts': ([VolumeMount],), # noqa: E501 - 'working_dir': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'args': 'args', # noqa: E501 - 'command': 'command', # noqa: E501 - 'env': 'env', # noqa: E501 - 'env_from': 'envFrom', # noqa: E501 - 'image': 'image', # noqa: E501 - 'image_pull_policy': 'imagePullPolicy', # noqa: E501 - 'lifecycle': 'lifecycle', # noqa: E501 - 'liveness_probe': 'livenessProbe', # noqa: E501 - 'mirror_volume_mounts': 'mirrorVolumeMounts', # noqa: E501 - 'ports': 'ports', # noqa: E501 - 'readiness_probe': 'readinessProbe', # noqa: E501 - 'resources': 'resources', # noqa: E501 - 'security_context': 'securityContext', # noqa: E501 - 'startup_probe': 'startupProbe', # noqa: E501 - 'stdin': 'stdin', # noqa: E501 - 'stdin_once': 'stdinOnce', # noqa: E501 - 'termination_message_path': 'terminationMessagePath', # noqa: E501 - 'termination_message_policy': 'terminationMessagePolicy', # noqa: E501 - 'tty': 'tty', # noqa: E501 - 'volume_devices': 
'volumeDevices', # noqa: E501 - 'volume_mounts': 'volumeMounts', # noqa: E501 - 'working_dir': 'workingDir', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1UserContainer - a model defined in OpenAPI - - Args: - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. 
The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image (str): Container image name. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. [optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - mirror_volume_mounts (bool): MirrorVolumeMounts will mount the same volumes specified in the main container to the container (including artifacts), at the same mountPaths. This enables dind daemon to partially see the same filesystem as the main container in order to use features such as docker volume binding. [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.. 
[optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. [optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. [optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. 
Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1UserContainer - a model defined in OpenAPI - - Args: - name (str): Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - args ([str]): Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - command ([str]): Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell. [optional] # noqa: E501 - env ([EnvVar]): List of environment variables to set in the container. Cannot be updated.. [optional] # noqa: E501 - env_from ([EnvFromSource]): List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.. [optional] # noqa: E501 - image (str): Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 - image_pull_policy (str): Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images. 
[optional] # noqa: E501 - lifecycle (Lifecycle): [optional] # noqa: E501 - liveness_probe (Probe): [optional] # noqa: E501 - mirror_volume_mounts (bool): MirrorVolumeMounts will mount the same volumes specified in the main container to the container (including artifacts), at the same mountPaths. This enables dind daemon to partially see the same filesystem as the main container in order to use features such as docker volume binding. [optional] # noqa: E501 - ports ([ContainerPort]): List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.. [optional] # noqa: E501 - readiness_probe (Probe): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - security_context (SecurityContext): [optional] # noqa: E501 - startup_probe (Probe): [optional] # noqa: E501 - stdin (bool): Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.. [optional] # noqa: E501 - stdin_once (bool): Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false. 
[optional] # noqa: E501 - termination_message_path (str): Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.. [optional] # noqa: E501 - termination_message_policy (str): Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.. [optional] # noqa: E501 - tty (bool): Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.. [optional] # noqa: E501 - volume_devices ([VolumeDevice]): volumeDevices is the list of block devices to be used by the container.. [optional] # noqa: E501 - volume_mounts ([VolumeMount]): Pod volumes to mount into the container's filesystem. Cannot be updated.. [optional] # noqa: E501 - working_dir (str): Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py deleted file mode 100644 index 87dc28fb6e66..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py +++ /dev/null @@ -1,293 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - - -class IoArgoprojWorkflowV1alpha1ValueFrom(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'config_map_key_ref': (ConfigMapKeySelector,), # noqa: E501 - 'default': (str,), # noqa: E501 - 'event': (str,), # noqa: E501 - 'expression': (str,), # noqa: E501 - 'jq_filter': (str,), # noqa: E501 - 'json_path': (str,), # noqa: E501 - 'parameter': (str,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'supplied': (bool, date, datetime, dict, float, int, list, str, none_type,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map_key_ref': 'configMapKeyRef', # noqa: E501 - 'default': 'default', # noqa: E501 - 'event': 'event', # noqa: E501 - 'expression': 'expression', # noqa: E501 - 'jq_filter': 'jqFilter', # noqa: E501 - 'json_path': 'jsonPath', # noqa: E501 - 'parameter': 'parameter', # noqa: E501 - 'path': 'path', # noqa: E501 - 'supplied': 'supplied', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ValueFrom - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - default (str): Default specifies a value to be used if retrieving the value from the specified source fails. [optional] # noqa: E501 - event (str): Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`. [optional] # noqa: E501 - expression (str): Expression, if defined, is evaluated to specify the value for the parameter. [optional] # noqa: E501 - jq_filter (str): JQFilter expression against the resource object in resource templates. [optional] # noqa: E501 - json_path (str): JSONPath of a resource to retrieve an output parameter value from in resource templates. 
[optional] # noqa: E501 - parameter (str): Parameter reference to a step or dag task in which to retrieve an output parameter value from (e.g. '{{steps.mystep.outputs.myparam}}'). [optional] # noqa: E501 - path (str): Path in the container to retrieve an output parameter value from in container templates. [optional] # noqa: E501 - supplied (bool, date, datetime, dict, float, int, list, str, none_type): SuppliedValueFrom is a placeholder for a value to be filled in directly, either through the CLI, API, etc.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1ValueFrom - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 - default (str): Default specifies a value to be used if retrieving the value from the specified source fails. 
[optional] # noqa: E501 - event (str): Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`. [optional] # noqa: E501 - expression (str): Expression, if defined, is evaluated to specify the value for the parameter. [optional] # noqa: E501 - jq_filter (str): JQFilter expression against the resource object in resource templates. [optional] # noqa: E501 - json_path (str): JSONPath of a resource to retrieve an output parameter value from in resource templates. [optional] # noqa: E501 - parameter (str): Parameter reference to a step or dag task in which to retrieve an output parameter value from (e.g. '{{steps.mystep.outputs.myparam}}'). [optional] # noqa: E501 - path (str): Path in the container to retrieve an output parameter value from in container templates. [optional] # noqa: E501 - supplied (bool, date, datetime, dict, float, int, list, str, none_type): SuppliedValueFrom is a placeholder for a value to be filled in directly, either through the CLI, API, etc.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_version.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_version.py deleted file mode 100644 index ed5b87d1a0d0..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_version.py +++ /dev/null @@ -1,303 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1Version(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'build_date': (str,), # noqa: E501 - 'compiler': (str,), # noqa: E501 - 'git_commit': (str,), # noqa: E501 - 'git_tag': (str,), # noqa: E501 - 'git_tree_state': (str,), # noqa: E501 - 'go_version': (str,), # noqa: E501 - 'platform': (str,), # noqa: E501 - 'version': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'build_date': 'buildDate', # noqa: E501 - 'compiler': 'compiler', # noqa: E501 - 'git_commit': 'gitCommit', # noqa: E501 - 'git_tag': 'gitTag', # noqa: E501 - 'git_tree_state': 'gitTreeState', # noqa: E501 - 'go_version': 'goVersion', # noqa: E501 - 'platform': 'platform', # noqa: E501 - 'version': 'version', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, build_date, compiler, git_commit, git_tag, git_tree_state, go_version, platform, version, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Version - a model defined in OpenAPI - - Args: - build_date (str): - compiler (str): - git_commit (str): - git_tag (str): - git_tree_state (str): - go_version (str): - platform (str): - version (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong 
type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.build_date = build_date - self.compiler = compiler - self.git_commit = git_commit - self.git_tag = git_tag - self.git_tree_state = git_tree_state - self.go_version = go_version - self.platform = platform - self.version = version - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, build_date, compiler, git_commit, git_tag, git_tree_state, go_version, platform, version, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Version - a model defined in OpenAPI - - Args: - build_date (str): - compiler (str): - git_commit (str): - git_tag (str): - git_tree_state (str): - go_version (str): - platform (str): - version (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.build_date = build_date - self.compiler = compiler - self.git_commit = git_commit - self.git_tag = git_tag - self.git_tree_state = git_tree_state - self.go_version = go_version - self.platform = platform - self.version = version - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_volume_claim_gc.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_volume_claim_gc.py deleted file mode 100644 index dd27a89083d0..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_volume_claim_gc.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1VolumeClaimGC(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'strategy': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'strategy': 'strategy', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1VolumeClaimGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - strategy (str): Strategy is the strategy to use. One of \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". Defaults to \"OnWorkflowSuccess\". [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1VolumeClaimGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - strategy (str): Strategy is the strategy to use. One of \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". Defaults to \"OnWorkflowSuccess\". 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow.py deleted file mode 100644 index 0d99c0d467b1..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow.py +++ /dev/null @@ -1,289 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_status import IoArgoprojWorkflowV1alpha1WorkflowStatus - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojWorkflowV1alpha1WorkflowSpec'] = IoArgoprojWorkflowV1alpha1WorkflowSpec - globals()['IoArgoprojWorkflowV1alpha1WorkflowStatus'] = IoArgoprojWorkflowV1alpha1WorkflowStatus - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojWorkflowV1alpha1Workflow(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojWorkflowV1alpha1WorkflowSpec,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - 'status': (IoArgoprojWorkflowV1alpha1WorkflowStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Workflow - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - status (IoArgoprojWorkflowV1alpha1WorkflowStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1Workflow - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - status (IoArgoprojWorkflowV1alpha1WorkflowStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_create_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_create_request.py deleted file mode 100644 index 7c6bc59469e1..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_create_request.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow - globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojWorkflowV1alpha1Workflow'] = IoArgoprojWorkflowV1alpha1Workflow - - -class IoArgoprojWorkflowV1alpha1WorkflowCreateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'create_options': (CreateOptions,), # noqa: E501 - 'instance_id': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'server_dry_run': (bool,), # noqa: E501 - 'workflow': (IoArgoprojWorkflowV1alpha1Workflow,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_options': 'createOptions', # noqa: E501 - 'instance_id': 'instanceID', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'server_dry_run': 'serverDryRun', # noqa: E501 - 'workflow': 'workflow', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowCreateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - instance_id (str): This field is no longer used.. [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - server_dry_run (bool): [optional] # noqa: E501 - workflow (IoArgoprojWorkflowV1alpha1Workflow): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowCreateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - instance_id (str): This field is no longer used.. [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - server_dry_run (bool): [optional] # noqa: E501 - workflow (IoArgoprojWorkflowV1alpha1Workflow): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding.py deleted file mode 100644 index d0a99c342eed..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_event_binding_spec import IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec'] = IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojWorkflowV1alpha1WorkflowEventBinding(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowEventBinding - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowEventBinding - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py deleted file mode 100644 index 28c4aeb9a423..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_event_binding import IoArgoprojWorkflowV1alpha1WorkflowEventBinding - from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojWorkflowV1alpha1WorkflowEventBinding'] = IoArgoprojWorkflowV1alpha1WorkflowEventBinding - globals()['ListMeta'] = ListMeta - - -class IoArgoprojWorkflowV1alpha1WorkflowEventBindingList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'items': ([IoArgoprojWorkflowV1alpha1WorkflowEventBinding],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowEventBindingList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1WorkflowEventBinding]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowEventBindingList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1WorkflowEventBinding]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py deleted file mode 100644 index 5e72a731b05a..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_event import IoArgoprojWorkflowV1alpha1Event - from argo_workflows.model.io_argoproj_workflow_v1alpha1_submit import IoArgoprojWorkflowV1alpha1Submit - globals()['IoArgoprojWorkflowV1alpha1Event'] = IoArgoprojWorkflowV1alpha1Event - globals()['IoArgoprojWorkflowV1alpha1Submit'] = IoArgoprojWorkflowV1alpha1Submit - - -class IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'event': (IoArgoprojWorkflowV1alpha1Event,), # noqa: E501 - 'submit': (IoArgoprojWorkflowV1alpha1Submit,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'event': 'event', # noqa: E501 - 'submit': 'submit', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, event, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec - a model defined in OpenAPI - - Args: - event (IoArgoprojWorkflowV1alpha1Event): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - submit (IoArgoprojWorkflowV1alpha1Submit): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.event = event - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, event, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec - a model defined in OpenAPI - - Args: - event (IoArgoprojWorkflowV1alpha1Event): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - submit (IoArgoprojWorkflowV1alpha1Submit): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.event = event - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py deleted file mode 100644 index f8eb6fa28d30..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py +++ /dev/null @@ -1,277 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata - globals()['IoArgoprojWorkflowV1alpha1Metadata'] = IoArgoprojWorkflowV1alpha1Metadata - - -class IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'force_finalizer_removal': (bool,), # noqa: E501 - 'pod_metadata': (IoArgoprojWorkflowV1alpha1Metadata,), # noqa: E501 - 'pod_spec_patch': (str,), # noqa: E501 - 'service_account_name': (str,), # noqa: E501 - 'strategy': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'force_finalizer_removal': 'forceFinalizerRemoval', # noqa: E501 - 'pod_metadata': 'podMetadata', # noqa: E501 - 'pod_spec_patch': 'podSpecPatch', # noqa: E501 - 'service_account_name': 'serviceAccountName', # noqa: E501 - 'strategy': 'strategy', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - force_finalizer_removal (bool): ForceFinalizerRemoval: if set to true, the finalizer will be removed in the case that Artifact GC fails. [optional] # noqa: E501 - pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the artgc pod spec.. [optional] # noqa: E501 - service_account_name (str): ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion. [optional] # noqa: E501 - strategy (str): Strategy is the strategy to use.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - force_finalizer_removal (bool): ForceFinalizerRemoval: if set to true, the finalizer will be removed in the case that Artifact GC fails. [optional] # noqa: E501 - pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the artgc pod spec.. [optional] # noqa: E501 - service_account_name (str): ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion. [optional] # noqa: E501 - strategy (str): Strategy is the strategy to use.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_lint_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_lint_request.py deleted file mode 100644 index 0a8504da0e84..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_lint_request.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow - globals()['IoArgoprojWorkflowV1alpha1Workflow'] = IoArgoprojWorkflowV1alpha1Workflow - - -class IoArgoprojWorkflowV1alpha1WorkflowLintRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'namespace': (str,), # noqa: E501 - 'workflow': (IoArgoprojWorkflowV1alpha1Workflow,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'namespace': 'namespace', # noqa: E501 - 'workflow': 'workflow', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowLintRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): [optional] # noqa: E501 - workflow (IoArgoprojWorkflowV1alpha1Workflow): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowLintRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): [optional] # noqa: E501 - workflow (IoArgoprojWorkflowV1alpha1Workflow): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_list.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_list.py deleted file mode 100644 index 34b08c1f829e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_list.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow - from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojWorkflowV1alpha1Workflow'] = IoArgoprojWorkflowV1alpha1Workflow - globals()['ListMeta'] = ListMeta - - -class IoArgoprojWorkflowV1alpha1WorkflowList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'items': ([IoArgoprojWorkflowV1alpha1Workflow],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1Workflow]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1Workflow]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_metadata.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_metadata.py deleted file mode 100644 index adad10278573..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_metadata.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_value_from import IoArgoprojWorkflowV1alpha1LabelValueFrom - globals()['IoArgoprojWorkflowV1alpha1LabelValueFrom'] = IoArgoprojWorkflowV1alpha1LabelValueFrom - - -class IoArgoprojWorkflowV1alpha1WorkflowMetadata(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'annotations': ({str: (str,)},), # noqa: E501 - 'labels': ({str: (str,)},), # noqa: E501 - 'labels_from': ({str: (IoArgoprojWorkflowV1alpha1LabelValueFrom,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'annotations': 'annotations', # noqa: E501 - 'labels': 'labels', # noqa: E501 - 'labels_from': 'labelsFrom', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowMetadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - labels_from ({str: (IoArgoprojWorkflowV1alpha1LabelValueFrom,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowMetadata - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): [optional] # noqa: E501 - labels ({str: (str,)}): [optional] # noqa: E501 - labels_from ({str: (IoArgoprojWorkflowV1alpha1LabelValueFrom,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py deleted file mode 100644 index 0fc118b08b62..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'memoized': (bool,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'parameters': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'memoized': 'memoized', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - memoized (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - memoized (bool): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resume_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resume_request.py deleted file mode 100644 index 04f3e35fe070..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_resume_request.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowResumeRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'node_field_selector': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'node_field_selector': 'nodeFieldSelector', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowResumeRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowResumeRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py deleted file mode 100644 index 9412908a05ca..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_retry_request.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowRetryRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'node_field_selector': (str,), # noqa: E501 - 'parameters': ([str],), # noqa: E501 - 'restart_successful': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'node_field_selector': 'nodeFieldSelector', # noqa: E501 - 'parameters': 'parameters', # noqa: E501 - 'restart_successful': 'restartSuccessful', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowRetryRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - restart_successful (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowRetryRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - parameters ([str]): [optional] # noqa: E501 - restart_successful (bool): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_set_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_set_request.py deleted file mode 100644 index 5cf79fccbc62..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_set_request.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowSetRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'message': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'node_field_selector': (str,), # noqa: E501 - 'output_parameters': (str,), # noqa: E501 - 'phase': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'message': 'message', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'node_field_selector': 'nodeFieldSelector', # noqa: E501 - 'output_parameters': 'outputParameters', # noqa: E501 - 'phase': 'phase', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSetRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - output_parameters (str): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSetRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - output_parameters (str): [optional] # noqa: E501 - phase (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py deleted file mode 100644 index 3a5e330beda5..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_spec.py +++ /dev/null @@ -1,471 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.affinity import Affinity - from argo_workflows.model.host_alias import HostAlias - from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef - from argo_workflows.model.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig - from argo_workflows.model.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook - from argo_workflows.model.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata - from argo_workflows.model.io_argoproj_workflow_v1alpha1_metrics 
import IoArgoprojWorkflowV1alpha1Metrics - from argo_workflows.model.io_argoproj_workflow_v1alpha1_pod_gc import IoArgoprojWorkflowV1alpha1PodGC - from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy - from argo_workflows.model.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template - from argo_workflows.model.io_argoproj_workflow_v1alpha1_ttl_strategy import IoArgoprojWorkflowV1alpha1TTLStrategy - from argo_workflows.model.io_argoproj_workflow_v1alpha1_volume_claim_gc import IoArgoprojWorkflowV1alpha1VolumeClaimGC - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc import IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_metadata import IoArgoprojWorkflowV1alpha1WorkflowMetadata - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef - from argo_workflows.model.io_k8s_api_policy_v1_pod_disruption_budget_spec import IoK8sApiPolicyV1PodDisruptionBudgetSpec - from argo_workflows.model.local_object_reference import LocalObjectReference - from argo_workflows.model.persistent_volume_claim import PersistentVolumeClaim - from argo_workflows.model.pod_dns_config import PodDNSConfig - from argo_workflows.model.pod_security_context import PodSecurityContext - from argo_workflows.model.toleration import Toleration - from argo_workflows.model.volume import Volume - globals()['Affinity'] = Affinity - globals()['HostAlias'] = HostAlias - globals()['IoArgoprojWorkflowV1alpha1Arguments'] = IoArgoprojWorkflowV1alpha1Arguments - globals()['IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef'] = IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef - globals()['IoArgoprojWorkflowV1alpha1ExecutorConfig'] = 
IoArgoprojWorkflowV1alpha1ExecutorConfig - globals()['IoArgoprojWorkflowV1alpha1LifecycleHook'] = IoArgoprojWorkflowV1alpha1LifecycleHook - globals()['IoArgoprojWorkflowV1alpha1Metadata'] = IoArgoprojWorkflowV1alpha1Metadata - globals()['IoArgoprojWorkflowV1alpha1Metrics'] = IoArgoprojWorkflowV1alpha1Metrics - globals()['IoArgoprojWorkflowV1alpha1PodGC'] = IoArgoprojWorkflowV1alpha1PodGC - globals()['IoArgoprojWorkflowV1alpha1RetryStrategy'] = IoArgoprojWorkflowV1alpha1RetryStrategy - globals()['IoArgoprojWorkflowV1alpha1Synchronization'] = IoArgoprojWorkflowV1alpha1Synchronization - globals()['IoArgoprojWorkflowV1alpha1TTLStrategy'] = IoArgoprojWorkflowV1alpha1TTLStrategy - globals()['IoArgoprojWorkflowV1alpha1Template'] = IoArgoprojWorkflowV1alpha1Template - globals()['IoArgoprojWorkflowV1alpha1VolumeClaimGC'] = IoArgoprojWorkflowV1alpha1VolumeClaimGC - globals()['IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC'] = IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC - globals()['IoArgoprojWorkflowV1alpha1WorkflowMetadata'] = IoArgoprojWorkflowV1alpha1WorkflowMetadata - globals()['IoArgoprojWorkflowV1alpha1WorkflowTemplateRef'] = IoArgoprojWorkflowV1alpha1WorkflowTemplateRef - globals()['IoK8sApiPolicyV1PodDisruptionBudgetSpec'] = IoK8sApiPolicyV1PodDisruptionBudgetSpec - globals()['LocalObjectReference'] = LocalObjectReference - globals()['PersistentVolumeClaim'] = PersistentVolumeClaim - globals()['PodDNSConfig'] = PodDNSConfig - globals()['PodSecurityContext'] = PodSecurityContext - globals()['Toleration'] = Toleration - globals()['Volume'] = Volume - - -class IoArgoprojWorkflowV1alpha1WorkflowSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'active_deadline_seconds': (int,), # noqa: E501 - 'affinity': (Affinity,), # noqa: E501 - 'archive_logs': (bool,), # noqa: E501 - 'arguments': (IoArgoprojWorkflowV1alpha1Arguments,), # noqa: E501 - 'artifact_gc': (IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC,), # noqa: E501 - 'artifact_repository_ref': (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef,), # noqa: E501 - 'automount_service_account_token': (bool,), # noqa: E501 - 'dns_config': (PodDNSConfig,), # noqa: E501 - 'dns_policy': (str,), # noqa: E501 - 'entrypoint': (str,), # noqa: E501 - 'executor': (IoArgoprojWorkflowV1alpha1ExecutorConfig,), # noqa: E501 - 'hooks': ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)},), # noqa: E501 - 'host_aliases': ([HostAlias],), # noqa: E501 - 'host_network': (bool,), # noqa: E501 - 'image_pull_secrets': ([LocalObjectReference],), # noqa: E501 - 'metrics': (IoArgoprojWorkflowV1alpha1Metrics,), # noqa: E501 - 'node_selector': ({str: (str,)},), # noqa: E501 - 'on_exit': (str,), # noqa: E501 - 'parallelism': (int,), # noqa: E501 - 'pod_disruption_budget': (IoK8sApiPolicyV1PodDisruptionBudgetSpec,), # noqa: E501 - 'pod_gc': (IoArgoprojWorkflowV1alpha1PodGC,), # noqa: E501 - 'pod_metadata': (IoArgoprojWorkflowV1alpha1Metadata,), # noqa: E501 - 'pod_priority': (int,), # noqa: E501 - 'pod_priority_class_name': (str,), # noqa: E501 - 'pod_spec_patch': (str,), # noqa: E501 - 'priority': (int,), # noqa: E501 - 'retry_strategy': (IoArgoprojWorkflowV1alpha1RetryStrategy,), # noqa: E501 - 'scheduler_name': (str,), # noqa: E501 - 'security_context': (PodSecurityContext,), # noqa: E501 - 'service_account_name': (str,), # noqa: E501 - 'shutdown': (str,), # noqa: E501 - 'suspend': (bool,), # noqa: E501 - 'synchronization': (IoArgoprojWorkflowV1alpha1Synchronization,), # noqa: E501 - 'template_defaults': (IoArgoprojWorkflowV1alpha1Template,), # noqa: E501 - 'templates': ([IoArgoprojWorkflowV1alpha1Template],), # noqa: E501 - 'tolerations': 
([Toleration],), # noqa: E501 - 'ttl_strategy': (IoArgoprojWorkflowV1alpha1TTLStrategy,), # noqa: E501 - 'volume_claim_gc': (IoArgoprojWorkflowV1alpha1VolumeClaimGC,), # noqa: E501 - 'volume_claim_templates': ([PersistentVolumeClaim],), # noqa: E501 - 'volumes': ([Volume],), # noqa: E501 - 'workflow_metadata': (IoArgoprojWorkflowV1alpha1WorkflowMetadata,), # noqa: E501 - 'workflow_template_ref': (IoArgoprojWorkflowV1alpha1WorkflowTemplateRef,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'active_deadline_seconds': 'activeDeadlineSeconds', # noqa: E501 - 'affinity': 'affinity', # noqa: E501 - 'archive_logs': 'archiveLogs', # noqa: E501 - 'arguments': 'arguments', # noqa: E501 - 'artifact_gc': 'artifactGC', # noqa: E501 - 'artifact_repository_ref': 'artifactRepositoryRef', # noqa: E501 - 'automount_service_account_token': 'automountServiceAccountToken', # noqa: E501 - 'dns_config': 'dnsConfig', # noqa: E501 - 'dns_policy': 'dnsPolicy', # noqa: E501 - 'entrypoint': 'entrypoint', # noqa: E501 - 'executor': 'executor', # noqa: E501 - 'hooks': 'hooks', # noqa: E501 - 'host_aliases': 'hostAliases', # noqa: E501 - 'host_network': 'hostNetwork', # noqa: E501 - 'image_pull_secrets': 'imagePullSecrets', # noqa: E501 - 'metrics': 'metrics', # noqa: E501 - 'node_selector': 'nodeSelector', # noqa: E501 - 'on_exit': 'onExit', # noqa: E501 - 'parallelism': 'parallelism', # noqa: E501 - 'pod_disruption_budget': 'podDisruptionBudget', # noqa: E501 - 'pod_gc': 'podGC', # noqa: E501 - 'pod_metadata': 'podMetadata', # noqa: E501 - 'pod_priority': 'podPriority', # noqa: E501 - 'pod_priority_class_name': 'podPriorityClassName', # noqa: E501 - 'pod_spec_patch': 'podSpecPatch', # noqa: E501 - 'priority': 'priority', # noqa: E501 - 'retry_strategy': 'retryStrategy', # noqa: E501 - 'scheduler_name': 'schedulerName', # noqa: E501 - 'security_context': 'securityContext', # noqa: E501 - 'service_account_name': 'serviceAccountName', # 
noqa: E501 - 'shutdown': 'shutdown', # noqa: E501 - 'suspend': 'suspend', # noqa: E501 - 'synchronization': 'synchronization', # noqa: E501 - 'template_defaults': 'templateDefaults', # noqa: E501 - 'templates': 'templates', # noqa: E501 - 'tolerations': 'tolerations', # noqa: E501 - 'ttl_strategy': 'ttlStrategy', # noqa: E501 - 'volume_claim_gc': 'volumeClaimGC', # noqa: E501 - 'volume_claim_templates': 'volumeClaimTemplates', # noqa: E501 - 'volumes': 'volumes', # noqa: E501 - 'workflow_metadata': 'workflowMetadata', # noqa: E501 - 'workflow_template_ref': 'workflowTemplateRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - active_deadline_seconds (int): Optional duration in seconds relative to the workflow start time which the workflow is allowed to run before the controller terminates the io.argoproj.workflow.v1alpha1. A value of zero is used to terminate a Running workflow. [optional] # noqa: E501 - affinity (Affinity): [optional] # noqa: E501 - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - artifact_gc (IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC): [optional] # noqa: E501 - artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef): [optional] # noqa: E501 - automount_service_account_token (bool): AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false.. [optional] # noqa: E501 - dns_config (PodDNSConfig): [optional] # noqa: E501 - dns_policy (str): Set DNS policy for workflow pods. Defaults to \"ClusterFirst\". Valid values are 'ClusterFirstWithHostNet', 'ClusterFirst', 'Default' or 'None'. DNS parameters given in DNSConfig will be merged with the policy selected with DNSPolicy. To have DNS options set along with hostNetwork, you have to specify DNS policy explicitly to 'ClusterFirstWithHostNet'.. [optional] # noqa: E501 - entrypoint (str): Entrypoint is a template reference to the starting point of the io.argoproj.workflow.v1alpha1.. 
[optional] # noqa: E501 - executor (IoArgoprojWorkflowV1alpha1ExecutorConfig): [optional] # noqa: E501 - hooks ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}): Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step. [optional] # noqa: E501 - host_aliases ([HostAlias]): [optional] # noqa: E501 - host_network (bool): Host networking requested for this workflow pod. Default to false.. [optional] # noqa: E501 - image_pull_secrets ([LocalObjectReference]): ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images in pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets can be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet. More info: https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod. [optional] # noqa: E501 - metrics (IoArgoprojWorkflowV1alpha1Metrics): [optional] # noqa: E501 - node_selector ({str: (str,)}): NodeSelector is a selector which will result in all pods of the workflow to be scheduled on the selected node(s). This is able to be overridden by a nodeSelector specified in the template.. [optional] # noqa: E501 - on_exit (str): OnExit is a template reference which is invoked at the end of the workflow, irrespective of the success, failure, or error of the primary io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - parallelism (int): Parallelism limits the max total parallel pods that can execute at the same time in a workflow. [optional] # noqa: E501 - pod_disruption_budget (IoK8sApiPolicyV1PodDisruptionBudgetSpec): [optional] # noqa: E501 - pod_gc (IoArgoprojWorkflowV1alpha1PodGC): [optional] # noqa: E501 - pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - pod_priority (int): Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead.. 
[optional] # noqa: E501 - pod_priority_class_name (str): PriorityClassName to apply to workflow pods.. [optional] # noqa: E501 - pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).. [optional] # noqa: E501 - priority (int): Priority is used if controller is configured to process limited number of workflows in parallel. Workflows with higher priority are processed first.. [optional] # noqa: E501 - retry_strategy (IoArgoprojWorkflowV1alpha1RetryStrategy): [optional] # noqa: E501 - scheduler_name (str): Set scheduler name for all pods. Will be overridden if container/script template's scheduler name is set. Default scheduler will be used if neither specified.. [optional] # noqa: E501 - security_context (PodSecurityContext): [optional] # noqa: E501 - service_account_name (str): ServiceAccountName is the name of the ServiceAccount to run all pods of the workflow as.. [optional] # noqa: E501 - shutdown (str): Shutdown will shutdown the workflow according to its ShutdownStrategy. [optional] # noqa: E501 - suspend (bool): Suspend will suspend the workflow and prevent execution of any future steps in the workflow. [optional] # noqa: E501 - synchronization (IoArgoprojWorkflowV1alpha1Synchronization): [optional] # noqa: E501 - template_defaults (IoArgoprojWorkflowV1alpha1Template): [optional] # noqa: E501 - templates ([IoArgoprojWorkflowV1alpha1Template]): Templates is a list of workflow templates used in a workflow. [optional] # noqa: E501 - tolerations ([Toleration]): Tolerations to apply to workflow pods.. [optional] # noqa: E501 - ttl_strategy (IoArgoprojWorkflowV1alpha1TTLStrategy): [optional] # noqa: E501 - volume_claim_gc (IoArgoprojWorkflowV1alpha1VolumeClaimGC): [optional] # noqa: E501 - volume_claim_templates ([PersistentVolumeClaim]): VolumeClaimTemplates is a list of claims that containers are allowed to reference. 
The Workflow controller will create the claims at the beginning of the workflow and delete the claims upon completion of the workflow. [optional] # noqa: E501 - volumes ([Volume]): Volumes is a list of volumes that can be mounted by containers in a io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - workflow_metadata (IoArgoprojWorkflowV1alpha1WorkflowMetadata): [optional] # noqa: E501 - workflow_template_ref (IoArgoprojWorkflowV1alpha1WorkflowTemplateRef): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - active_deadline_seconds (int): Optional duration in seconds relative to the workflow start time which the workflow is allowed to run before the controller terminates the io.argoproj.workflow.v1alpha1. A value of zero is used to terminate a Running workflow. [optional] # noqa: E501 - affinity (Affinity): [optional] # noqa: E501 - archive_logs (bool): ArchiveLogs indicates if the container logs should be archived. [optional] # noqa: E501 - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - artifact_gc (IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC): [optional] # noqa: E501 - artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef): [optional] # noqa: E501 - automount_service_account_token (bool): AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. ServiceAccountName of ExecutorConfig must be specified if this value is false.. [optional] # noqa: E501 - dns_config (PodDNSConfig): [optional] # noqa: E501 - dns_policy (str): Set DNS policy for workflow pods. Defaults to \"ClusterFirst\". Valid values are 'ClusterFirstWithHostNet', 'ClusterFirst', 'Default' or 'None'. DNS parameters given in DNSConfig will be merged with the policy selected with DNSPolicy. To have DNS options set along with hostNetwork, you have to specify DNS policy explicitly to 'ClusterFirstWithHostNet'.. [optional] # noqa: E501 - entrypoint (str): Entrypoint is a template reference to the starting point of the io.argoproj.workflow.v1alpha1.. 
[optional] # noqa: E501 - executor (IoArgoprojWorkflowV1alpha1ExecutorConfig): [optional] # noqa: E501 - hooks ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}): Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step. [optional] # noqa: E501 - host_aliases ([HostAlias]): [optional] # noqa: E501 - host_network (bool): Host networking requested for this workflow pod. Default to false.. [optional] # noqa: E501 - image_pull_secrets ([LocalObjectReference]): ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images in pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets can be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet. More info: https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod. [optional] # noqa: E501 - metrics (IoArgoprojWorkflowV1alpha1Metrics): [optional] # noqa: E501 - node_selector ({str: (str,)}): NodeSelector is a selector which will result in all pods of the workflow to be scheduled on the selected node(s). This is able to be overridden by a nodeSelector specified in the template.. [optional] # noqa: E501 - on_exit (str): OnExit is a template reference which is invoked at the end of the workflow, irrespective of the success, failure, or error of the primary io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - parallelism (int): Parallelism limits the max total parallel pods that can execute at the same time in a workflow. [optional] # noqa: E501 - pod_disruption_budget (IoK8sApiPolicyV1PodDisruptionBudgetSpec): [optional] # noqa: E501 - pod_gc (IoArgoprojWorkflowV1alpha1PodGC): [optional] # noqa: E501 - pod_metadata (IoArgoprojWorkflowV1alpha1Metadata): [optional] # noqa: E501 - pod_priority (int): Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead.. 
[optional] # noqa: E501 - pod_priority_class_name (str): PriorityClassName to apply to workflow pods.. [optional] # noqa: E501 - pod_spec_patch (str): PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).. [optional] # noqa: E501 - priority (int): Priority is used if controller is configured to process limited number of workflows in parallel. Workflows with higher priority are processed first.. [optional] # noqa: E501 - retry_strategy (IoArgoprojWorkflowV1alpha1RetryStrategy): [optional] # noqa: E501 - scheduler_name (str): Set scheduler name for all pods. Will be overridden if container/script template's scheduler name is set. Default scheduler will be used if neither specified.. [optional] # noqa: E501 - security_context (PodSecurityContext): [optional] # noqa: E501 - service_account_name (str): ServiceAccountName is the name of the ServiceAccount to run all pods of the workflow as.. [optional] # noqa: E501 - shutdown (str): Shutdown will shutdown the workflow according to its ShutdownStrategy. [optional] # noqa: E501 - suspend (bool): Suspend will suspend the workflow and prevent execution of any future steps in the workflow. [optional] # noqa: E501 - synchronization (IoArgoprojWorkflowV1alpha1Synchronization): [optional] # noqa: E501 - template_defaults (IoArgoprojWorkflowV1alpha1Template): [optional] # noqa: E501 - templates ([IoArgoprojWorkflowV1alpha1Template]): Templates is a list of workflow templates used in a workflow. [optional] # noqa: E501 - tolerations ([Toleration]): Tolerations to apply to workflow pods.. [optional] # noqa: E501 - ttl_strategy (IoArgoprojWorkflowV1alpha1TTLStrategy): [optional] # noqa: E501 - volume_claim_gc (IoArgoprojWorkflowV1alpha1VolumeClaimGC): [optional] # noqa: E501 - volume_claim_templates ([PersistentVolumeClaim]): VolumeClaimTemplates is a list of claims that containers are allowed to reference. 
The Workflow controller will create the claims at the beginning of the workflow and delete the claims upon completion of the workflow. [optional] # noqa: E501 - volumes ([Volume]): Volumes is a list of volumes that can be mounted by containers in a io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - workflow_metadata (IoArgoprojWorkflowV1alpha1WorkflowMetadata): [optional] # noqa: E501 - workflow_template_ref (IoArgoprojWorkflowV1alpha1WorkflowTemplateRef): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py deleted file mode 100644 index 86e17c538bbd..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py +++ /dev/null @@ -1,349 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus - from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus - from argo_workflows.model.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition - from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus - from argo_workflows.model.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs - from argo_workflows.model.io_argoproj_workflow_v1alpha1_synchronization_status import IoArgoprojWorkflowV1alpha1SynchronizationStatus - from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec - from argo_workflows.model.volume import Volume - globals()['IoArgoprojWorkflowV1alpha1ArtGCStatus'] = IoArgoprojWorkflowV1alpha1ArtGCStatus - globals()['IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus'] = IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus - globals()['IoArgoprojWorkflowV1alpha1Condition'] = IoArgoprojWorkflowV1alpha1Condition - globals()['IoArgoprojWorkflowV1alpha1NodeStatus'] = IoArgoprojWorkflowV1alpha1NodeStatus - globals()['IoArgoprojWorkflowV1alpha1Outputs'] = IoArgoprojWorkflowV1alpha1Outputs - globals()['IoArgoprojWorkflowV1alpha1SynchronizationStatus'] = IoArgoprojWorkflowV1alpha1SynchronizationStatus - globals()['IoArgoprojWorkflowV1alpha1Template'] = IoArgoprojWorkflowV1alpha1Template - globals()['IoArgoprojWorkflowV1alpha1WorkflowSpec'] = IoArgoprojWorkflowV1alpha1WorkflowSpec - globals()['Volume'] = Volume - - -class IoArgoprojWorkflowV1alpha1WorkflowStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'artifact_gc_status': (IoArgoprojWorkflowV1alpha1ArtGCStatus,), # noqa: E501 - 'artifact_repository_ref': (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus,), # noqa: E501 - 'compressed_nodes': (str,), # noqa: E501 - 'conditions': ([IoArgoprojWorkflowV1alpha1Condition],), # noqa: E501 - 'estimated_duration': (int,), # noqa: E501 - 'finished_at': (datetime,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'nodes': ({str: (IoArgoprojWorkflowV1alpha1NodeStatus,)},), # noqa: E501 - 'offload_node_status_version': (str,), # noqa: E501 - 'outputs': (IoArgoprojWorkflowV1alpha1Outputs,), # noqa: E501 - 'persistent_volume_claims': ([Volume],), # noqa: E501 - 'phase': (str,), # noqa: E501 - 'progress': (str,), # noqa: E501 - 'resources_duration': ({str: (int,)},), # noqa: E501 - 'started_at': (datetime,), # noqa: E501 - 'stored_templates': ({str: (IoArgoprojWorkflowV1alpha1Template,)},), # noqa: E501 - 'stored_workflow_template_spec': (IoArgoprojWorkflowV1alpha1WorkflowSpec,), # noqa: E501 - 'synchronization': 
(IoArgoprojWorkflowV1alpha1SynchronizationStatus,), # noqa: E501 - 'task_results_completion_status': ({str: (bool,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'artifact_gc_status': 'artifactGCStatus', # noqa: E501 - 'artifact_repository_ref': 'artifactRepositoryRef', # noqa: E501 - 'compressed_nodes': 'compressedNodes', # noqa: E501 - 'conditions': 'conditions', # noqa: E501 - 'estimated_duration': 'estimatedDuration', # noqa: E501 - 'finished_at': 'finishedAt', # noqa: E501 - 'message': 'message', # noqa: E501 - 'nodes': 'nodes', # noqa: E501 - 'offload_node_status_version': 'offloadNodeStatusVersion', # noqa: E501 - 'outputs': 'outputs', # noqa: E501 - 'persistent_volume_claims': 'persistentVolumeClaims', # noqa: E501 - 'phase': 'phase', # noqa: E501 - 'progress': 'progress', # noqa: E501 - 'resources_duration': 'resourcesDuration', # noqa: E501 - 'started_at': 'startedAt', # noqa: E501 - 'stored_templates': 'storedTemplates', # noqa: E501 - 'stored_workflow_template_spec': 'storedWorkflowTemplateSpec', # noqa: E501 - 'synchronization': 'synchronization', # noqa: E501 - 'task_results_completion_status': 'taskResultsCompletionStatus', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_gc_status (IoArgoprojWorkflowV1alpha1ArtGCStatus): [optional] # noqa: E501 - artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus): [optional] # noqa: E501 - compressed_nodes (str): Compressed and base64 decoded Nodes map. [optional] # noqa: E501 - conditions ([IoArgoprojWorkflowV1alpha1Condition]): Conditions is a list of conditions the Workflow may have. [optional] # noqa: E501 - estimated_duration (int): EstimatedDuration in seconds.. [optional] # noqa: E501 - finished_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): A human readable message indicating details about why the workflow is in this condition.. [optional] # noqa: E501 - nodes ({str: (IoArgoprojWorkflowV1alpha1NodeStatus,)}): Nodes is a mapping between a node ID and the node's status.. 
[optional] # noqa: E501 - offload_node_status_version (str): Whether on not node status has been offloaded to a database. If exists, then Nodes and CompressedNodes will be empty. This will actually be populated with a hash of the offloaded data.. [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - persistent_volume_claims ([Volume]): PersistentVolumeClaims tracks all PVCs that were created as part of the io.argoproj.workflow.v1alpha1. The contents of this list are drained at the end of the workflow.. [optional] # noqa: E501 - phase (str): Phase a simple, high-level summary of where the workflow is in its lifecycle. Will be \"\" (Unknown), \"Pending\", or \"Running\" before the workflow is completed, and \"Succeeded\", \"Failed\" or \"Error\" once the workflow has completed.. [optional] # noqa: E501 - progress (str): Progress to completion. [optional] # noqa: E501 - resources_duration ({str: (int,)}): ResourcesDuration is the total for the workflow. [optional] # noqa: E501 - started_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - stored_templates ({str: (IoArgoprojWorkflowV1alpha1Template,)}): StoredTemplates is a mapping between a template ref and the node's status.. [optional] # noqa: E501 - stored_workflow_template_spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): [optional] # noqa: E501 - synchronization (IoArgoprojWorkflowV1alpha1SynchronizationStatus): [optional] # noqa: E501 - task_results_completion_status ({str: (bool,)}): TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - artifact_gc_status (IoArgoprojWorkflowV1alpha1ArtGCStatus): [optional] # noqa: E501 - artifact_repository_ref (IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus): [optional] # noqa: E501 - compressed_nodes (str): Compressed and base64 decoded Nodes map. [optional] # noqa: E501 - conditions ([IoArgoprojWorkflowV1alpha1Condition]): Conditions is a list of conditions the Workflow may have. [optional] # noqa: E501 - estimated_duration (int): EstimatedDuration in seconds.. [optional] # noqa: E501 - finished_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): A human readable message indicating details about why the workflow is in this condition.. [optional] # noqa: E501 - nodes ({str: (IoArgoprojWorkflowV1alpha1NodeStatus,)}): Nodes is a mapping between a node ID and the node's status.. 
[optional] # noqa: E501 - offload_node_status_version (str): Whether on not node status has been offloaded to a database. If exists, then Nodes and CompressedNodes will be empty. This will actually be populated with a hash of the offloaded data.. [optional] # noqa: E501 - outputs (IoArgoprojWorkflowV1alpha1Outputs): [optional] # noqa: E501 - persistent_volume_claims ([Volume]): PersistentVolumeClaims tracks all PVCs that were created as part of the io.argoproj.workflow.v1alpha1. The contents of this list are drained at the end of the workflow.. [optional] # noqa: E501 - phase (str): Phase a simple, high-level summary of where the workflow is in its lifecycle. Will be \"\" (Unknown), \"Pending\", or \"Running\" before the workflow is completed, and \"Succeeded\", \"Failed\" or \"Error\" once the workflow has completed.. [optional] # noqa: E501 - progress (str): Progress to completion. [optional] # noqa: E501 - resources_duration ({str: (int,)}): ResourcesDuration is the total for the workflow. [optional] # noqa: E501 - started_at (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - stored_templates ({str: (IoArgoprojWorkflowV1alpha1Template,)}): StoredTemplates is a mapping between a template ref and the node's status.. [optional] # noqa: E501 - stored_workflow_template_spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): [optional] # noqa: E501 - synchronization (IoArgoprojWorkflowV1alpha1SynchronizationStatus): [optional] # noqa: E501 - task_results_completion_status ({str: (bool,)}): TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_step.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_step.py deleted file mode 100644 index 4172f91ca0fc..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_step.py +++ /dev/null @@ -1,315 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments - from argo_workflows.model.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn - from argo_workflows.model.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook - from argo_workflows.model.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef - globals()['IoArgoprojWorkflowV1alpha1Arguments'] = IoArgoprojWorkflowV1alpha1Arguments - globals()['IoArgoprojWorkflowV1alpha1ContinueOn'] = IoArgoprojWorkflowV1alpha1ContinueOn - globals()['IoArgoprojWorkflowV1alpha1LifecycleHook'] = IoArgoprojWorkflowV1alpha1LifecycleHook - globals()['IoArgoprojWorkflowV1alpha1Sequence'] = IoArgoprojWorkflowV1alpha1Sequence - globals()['IoArgoprojWorkflowV1alpha1Template'] = IoArgoprojWorkflowV1alpha1Template - globals()['IoArgoprojWorkflowV1alpha1TemplateRef'] = IoArgoprojWorkflowV1alpha1TemplateRef - - -class IoArgoprojWorkflowV1alpha1WorkflowStep(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'arguments': (IoArgoprojWorkflowV1alpha1Arguments,), # noqa: E501 - 'continue_on': (IoArgoprojWorkflowV1alpha1ContinueOn,), # noqa: E501 - 'hooks': ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)},), # noqa: E501 - 'inline': (IoArgoprojWorkflowV1alpha1Template,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'on_exit': (str,), # noqa: E501 - 'template': (str,), # noqa: E501 - 'template_ref': (IoArgoprojWorkflowV1alpha1TemplateRef,), # noqa: E501 - 'when': (str,), # noqa: E501 - 'with_items': ([dict],), # noqa: E501 - 'with_param': (str,), # noqa: E501 - 'with_sequence': (IoArgoprojWorkflowV1alpha1Sequence,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'arguments': 'arguments', # noqa: E501 - 'continue_on': 'continueOn', # noqa: E501 - 'hooks': 'hooks', # noqa: E501 - 'inline': 'inline', # noqa: E501 - 'name': 'name', # noqa: E501 - 'on_exit': 'onExit', # noqa: E501 - 'template': 'template', # noqa: E501 - 'template_ref': 'templateRef', # noqa: E501 - 'when': 'when', # noqa: E501 - 'with_items': 'withItems', # noqa: E501 - 'with_param': 'withParam', # noqa: E501 - 'with_sequence': 'withSequence', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowStep - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - continue_on (IoArgoprojWorkflowV1alpha1ContinueOn): [optional] # noqa: E501 - hooks ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}): Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step. [optional] # noqa: E501 - inline (IoArgoprojWorkflowV1alpha1Template): [optional] # noqa: E501 - name (str): Name of the step. [optional] # noqa: E501 - on_exit (str): OnExit is a template reference which is invoked at the end of the template, irrespective of the success, failure, or error of the primary template. DEPRECATED: Use Hooks[exit].Template instead.. [optional] # noqa: E501 - template (str): Template is the name of the template to execute as the step. [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - when (str): When is an expression in which the step should conditionally execute. 
[optional] # noqa: E501 - with_items ([dict]): WithItems expands a step into multiple parallel steps from the items in the list. [optional] # noqa: E501 - with_param (str): WithParam expands a step into multiple parallel steps from the value in the parameter, which is expected to be a JSON list.. [optional] # noqa: E501 - with_sequence (IoArgoprojWorkflowV1alpha1Sequence): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowStep - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - arguments (IoArgoprojWorkflowV1alpha1Arguments): [optional] # noqa: E501 - continue_on (IoArgoprojWorkflowV1alpha1ContinueOn): [optional] # noqa: E501 - hooks ({str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}): Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step. [optional] # noqa: E501 - inline (IoArgoprojWorkflowV1alpha1Template): [optional] # noqa: E501 - name (str): Name of the step. [optional] # noqa: E501 - on_exit (str): OnExit is a template reference which is invoked at the end of the template, irrespective of the success, failure, or error of the primary template. DEPRECATED: Use Hooks[exit].Template instead.. [optional] # noqa: E501 - template (str): Template is the name of the template to execute as the step. [optional] # noqa: E501 - template_ref (IoArgoprojWorkflowV1alpha1TemplateRef): [optional] # noqa: E501 - when (str): When is an expression in which the step should conditionally execute. [optional] # noqa: E501 - with_items ([dict]): WithItems expands a step into multiple parallel steps from the items in the list. [optional] # noqa: E501 - with_param (str): WithParam expands a step into multiple parallel steps from the value in the parameter, which is expected to be a JSON list.. [optional] # noqa: E501 - with_sequence (IoArgoprojWorkflowV1alpha1Sequence): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_stop_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_stop_request.py deleted file mode 100644 index 321513fc0e73..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_stop_request.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowStopRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'message': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'node_field_selector': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'message': 'message', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'node_field_selector': 'nodeFieldSelector', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowStopRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowStopRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - message (str): [optional] # noqa: E501 - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - node_field_selector (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_submit_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_submit_request.py deleted file mode 100644 index 180c32a9fc99..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_submit_request.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_submit_opts import IoArgoprojWorkflowV1alpha1SubmitOpts - globals()['IoArgoprojWorkflowV1alpha1SubmitOpts'] = IoArgoprojWorkflowV1alpha1SubmitOpts - - -class IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'namespace': (str,), # noqa: E501 - 'resource_kind': (str,), # noqa: E501 - 'resource_name': (str,), # noqa: E501 - 'submit_options': (IoArgoprojWorkflowV1alpha1SubmitOpts,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'namespace': 'namespace', # noqa: E501 - 'resource_kind': 'resourceKind', # noqa: E501 - 'resource_name': 'resourceName', # noqa: E501 - 'submit_options': 'submitOptions', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): [optional] # noqa: E501 - resource_kind (str): [optional] # noqa: E501 - resource_name (str): [optional] # noqa: E501 - submit_options (IoArgoprojWorkflowV1alpha1SubmitOpts): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): [optional] # noqa: E501 - resource_kind (str): [optional] # noqa: E501 - resource_name (str): [optional] # noqa: E501 - submit_options (IoArgoprojWorkflowV1alpha1SubmitOpts): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py deleted file mode 100644 index 6420cecbebc3..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py deleted file mode 100644 index b081782c627c..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template - globals()['IoArgoprojWorkflowV1alpha1Template'] = IoArgoprojWorkflowV1alpha1Template - - -class IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'tasks': ({str: (IoArgoprojWorkflowV1alpha1Template,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'tasks': 'tasks', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - tasks ({str: (IoArgoprojWorkflowV1alpha1Template,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - tasks ({str: (IoArgoprojWorkflowV1alpha1Template,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py deleted file mode 100644 index f45a852ab6b7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py +++ /dev/null @@ -1,261 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_result import IoArgoprojWorkflowV1alpha1NodeResult - globals()['IoArgoprojWorkflowV1alpha1NodeResult'] = IoArgoprojWorkflowV1alpha1NodeResult - - -class IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'nodes': ({str: (IoArgoprojWorkflowV1alpha1NodeResult,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'nodes': 'nodes', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - nodes ({str: (IoArgoprojWorkflowV1alpha1NodeResult,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - nodes ({str: (IoArgoprojWorkflowV1alpha1NodeResult,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template.py deleted file mode 100644 index cefd3e6511f2..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec - from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojWorkflowV1alpha1WorkflowSpec'] = IoArgoprojWorkflowV1alpha1WorkflowSpec - globals()['ObjectMeta'] = ObjectMeta - - -class IoArgoprojWorkflowV1alpha1WorkflowTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojWorkflowV1alpha1WorkflowSpec,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplate - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, metadata, spec, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplate - a model defined in OpenAPI - - Args: - metadata (ObjectMeta): - spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.metadata = metadata - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py deleted file mode 100644 index 8cedb13d08be..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate - globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojWorkflowV1alpha1WorkflowTemplate'] = IoArgoprojWorkflowV1alpha1WorkflowTemplate - - -class IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'create_options': (CreateOptions,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'template': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_options': 'createOptions', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1WorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1WorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py deleted file mode 100644 index 2b2ce37b57a7..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate - globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojWorkflowV1alpha1WorkflowTemplate'] = IoArgoprojWorkflowV1alpha1WorkflowTemplate - - -class IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'create_options': (CreateOptions,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'template': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_options': 'createOptions', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1WorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1WorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_list.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_list.py deleted file mode 100644 index f7a843c2f50e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_list.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate - from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojWorkflowV1alpha1WorkflowTemplate'] = IoArgoprojWorkflowV1alpha1WorkflowTemplate - globals()['ListMeta'] = ListMeta - - -class IoArgoprojWorkflowV1alpha1WorkflowTemplateList(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'items': ([IoArgoprojWorkflowV1alpha1WorkflowTemplate],), # noqa: E501 - 'metadata': (ListMeta,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1WorkflowTemplate]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, items, metadata, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateList - a model defined in OpenAPI - - Args: - items ([IoArgoprojWorkflowV1alpha1WorkflowTemplate]): - metadata (ListMeta): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.items = items - self.metadata = metadata - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_ref.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_ref.py deleted file mode 100644 index 1de65d6bb56d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_ref.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowTemplateRef(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'cluster_scope': (bool,), # noqa: E501 - 'name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'cluster_scope': 'clusterScope', # noqa: E501 - 'name': 'name', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cluster_scope (bool): ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate).. [optional] # noqa: E501 - name (str): Name is the resource name of the workflow template.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateRef - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - cluster_scope (bool): ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate).. [optional] # noqa: E501 - name (str): Name is the resource name of the workflow template.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py deleted file mode 100644 index f922f386d082..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate - globals()['IoArgoprojWorkflowV1alpha1WorkflowTemplate'] = IoArgoprojWorkflowV1alpha1WorkflowTemplate - - -class IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'template': (IoArgoprojWorkflowV1alpha1WorkflowTemplate,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'template': 'template', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): DEPRECATED: This field is ignored.. [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1WorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): DEPRECATED: This field is ignored.. 
[optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - template (IoArgoprojWorkflowV1alpha1WorkflowTemplate): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py deleted file mode 100644 index f4a4b862712d..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_watch_event.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_watch_event.py deleted file mode 100644 index 722fd357ccb4..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_watch_event.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow - globals()['IoArgoprojWorkflowV1alpha1Workflow'] = IoArgoprojWorkflowV1alpha1Workflow - - -class IoArgoprojWorkflowV1alpha1WorkflowWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'object': (IoArgoprojWorkflowV1alpha1Workflow,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'object': 'object', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (IoArgoprojWorkflowV1alpha1Workflow): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojWorkflowV1alpha1WorkflowWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (IoArgoprojWorkflowV1alpha1Workflow): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_k8s_api_policy_v1_pod_disruption_budget_spec.py b/sdks/python/client/argo_workflows/model/io_k8s_api_policy_v1_pod_disruption_budget_spec.py deleted file mode 100644 index 4f71a528489e..000000000000 --- a/sdks/python/client/argo_workflows/model/io_k8s_api_policy_v1_pod_disruption_budget_spec.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.label_selector import LabelSelector - globals()['LabelSelector'] = LabelSelector - - -class IoK8sApiPolicyV1PodDisruptionBudgetSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'max_unavailable': (str,), # noqa: E501 - 'min_available': (str,), # noqa: E501 - 'selector': (LabelSelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'max_unavailable': 'maxUnavailable', # noqa: E501 - 'min_available': 'minAvailable', # noqa: E501 - 'selector': 'selector', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoK8sApiPolicyV1PodDisruptionBudgetSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - max_unavailable (str): [optional] # noqa: E501 - min_available (str): [optional] # noqa: E501 - selector (LabelSelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoK8sApiPolicyV1PodDisruptionBudgetSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - max_unavailable (str): [optional] # noqa: E501 - min_available (str): [optional] # noqa: E501 - selector (LabelSelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/iscsi_volume_source.py b/sdks/python/client/argo_workflows/model/iscsi_volume_source.py deleted file mode 100644 index 2ae5e355f608..000000000000 --- a/sdks/python/client/argo_workflows/model/iscsi_volume_source.py +++ /dev/null @@ -1,311 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class ISCSIVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'iqn': (str,), # noqa: E501 - 'lun': (int,), # noqa: E501 - 'target_portal': (str,), # noqa: E501 - 'chap_auth_discovery': (bool,), # noqa: E501 - 'chap_auth_session': (bool,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'initiator_name': (str,), # noqa: E501 - 'iscsi_interface': (str,), # noqa: E501 - 'portals': ([str],), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'secret_ref': (LocalObjectReference,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'iqn': 'iqn', # noqa: E501 - 'lun': 'lun', # noqa: E501 - 'target_portal': 'targetPortal', # noqa: E501 - 'chap_auth_discovery': 'chapAuthDiscovery', # noqa: E501 - 'chap_auth_session': 'chapAuthSession', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'initiator_name': 'initiatorName', # noqa: E501 - 'iscsi_interface': 'iscsiInterface', # noqa: E501 - 'portals': 'portals', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, iqn, lun, target_portal, *args, **kwargs): # noqa: E501 - """ISCSIVolumeSource - a model defined in OpenAPI - - Args: - iqn (str): Target iSCSI Qualified Name. - lun (int): iSCSI Target Lun number. - target_portal (str): iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - chap_auth_discovery (bool): whether support iSCSI Discovery CHAP authentication. [optional] # noqa: E501 - chap_auth_session (bool): whether support iSCSI Session CHAP authentication. [optional] # noqa: E501 - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi. [optional] # noqa: E501 - initiator_name (str): Custom iSCSI Initiator Name. If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface : will be created for the connection.. [optional] # noqa: E501 - iscsi_interface (str): iSCSI Interface Name that uses an iSCSI transport. Defaults to 'default' (tcp).. [optional] # noqa: E501 - portals ([str]): iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260).. 
[optional] # noqa: E501 - read_only (bool): ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false.. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.iqn = iqn - self.lun = lun - self.target_portal = target_portal - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, iqn, lun, target_portal, *args, **kwargs): # noqa: E501 - """ISCSIVolumeSource - a model defined in OpenAPI - - Args: - iqn (str): Target iSCSI Qualified Name. - lun (int): iSCSI Target Lun number. - target_portal (str): iSCSI Target Portal. 
The Portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - chap_auth_discovery (bool): whether support iSCSI Discovery CHAP authentication. [optional] # noqa: E501 - chap_auth_session (bool): whether support iSCSI Session CHAP authentication. [optional] # noqa: E501 - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi. [optional] # noqa: E501 - initiator_name (str): Custom iSCSI Initiator Name. If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface : will be created for the connection.. [optional] # noqa: E501 - iscsi_interface (str): iSCSI Interface Name that uses an iSCSI transport. Defaults to 'default' (tcp).. [optional] # noqa: E501 - portals ([str]): iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260).. [optional] # noqa: E501 - read_only (bool): ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false.. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.iqn = iqn - self.lun = lun - self.target_portal = target_portal - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/key_to_path.py b/sdks/python/client/argo_workflows/model/key_to_path.py deleted file mode 100644 index 73e3a740d4a4..000000000000 --- a/sdks/python/client/argo_workflows/model/key_to_path.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class KeyToPath(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'path': (str,), # noqa: E501 - 'mode': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'path': 'path', # noqa: E501 - 'mode': 'mode', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, path, *args, **kwargs): # noqa: E501 - """KeyToPath - a model defined in OpenAPI - - Args: - key (str): The key to project. - path (str): The relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mode (int): Optional: mode bits used to set permissions on this file. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, path, *args, **kwargs): # noqa: E501 - """KeyToPath - a model defined in OpenAPI - - Args: - key (str): The key to project. - path (str): The relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mode (int): Optional: mode bits used to set permissions on this file. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/label_selector.py b/sdks/python/client/argo_workflows/model/label_selector.py deleted file mode 100644 index 3f6b61104c1e..000000000000 --- a/sdks/python/client/argo_workflows/model/label_selector.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.label_selector_requirement import LabelSelectorRequirement - globals()['LabelSelectorRequirement'] = LabelSelectorRequirement - - -class LabelSelector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'match_expressions': ([LabelSelectorRequirement],), # noqa: E501 - 'match_labels': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'match_expressions': 'matchExpressions', # noqa: E501 - 'match_labels': 'matchLabels', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """LabelSelector - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - match_expressions ([LabelSelectorRequirement]): matchExpressions is a list of label selector requirements. The requirements are ANDed.. [optional] # noqa: E501 - match_labels ({str: (str,)}): matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """LabelSelector - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - match_expressions ([LabelSelectorRequirement]): matchExpressions is a list of label selector requirements. The requirements are ANDed.. [optional] # noqa: E501 - match_labels ({str: (str,)}): matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/label_selector_requirement.py b/sdks/python/client/argo_workflows/model/label_selector_requirement.py deleted file mode 100644 index 332fde604232..000000000000 --- a/sdks/python/client/argo_workflows/model/label_selector_requirement.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class LabelSelectorRequirement(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'operator': (str,), # noqa: E501 - 'values': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'operator': 'operator', # noqa: E501 - 'values': 'values', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, operator, *args, **kwargs): # noqa: E501 - """LabelSelectorRequirement - a model defined in OpenAPI - - Args: - key (str): key is the label key that the selector applies to. - operator (str): operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - values ([str]): values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.operator = operator - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, operator, *args, **kwargs): # noqa: E501 - """LabelSelectorRequirement - a model defined in OpenAPI - - Args: - key (str): key is the label key that the selector applies to. - operator (str): operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - values ([str]): values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.operator = operator - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/lifecycle.py b/sdks/python/client/argo_workflows/model/lifecycle.py deleted file mode 100644 index aba36ad4e822..000000000000 --- a/sdks/python/client/argo_workflows/model/lifecycle.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.lifecycle_handler import LifecycleHandler - globals()['LifecycleHandler'] = LifecycleHandler - - -class Lifecycle(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'post_start': (LifecycleHandler,), # noqa: E501 - 'pre_stop': (LifecycleHandler,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'post_start': 'postStart', # noqa: E501 - 'pre_stop': 'preStop', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """Lifecycle - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - post_start (LifecycleHandler): [optional] # noqa: E501 - pre_stop (LifecycleHandler): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """Lifecycle - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - post_start (LifecycleHandler): [optional] # noqa: E501 - pre_stop (LifecycleHandler): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/lifecycle_handler.py b/sdks/python/client/argo_workflows/model/lifecycle_handler.py deleted file mode 100644 index d672083fc770..000000000000 --- a/sdks/python/client/argo_workflows/model/lifecycle_handler.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.exec_action import ExecAction - from argo_workflows.model.http_get_action import HTTPGetAction - from argo_workflows.model.tcp_socket_action import TCPSocketAction - globals()['ExecAction'] = ExecAction - globals()['HTTPGetAction'] = HTTPGetAction - globals()['TCPSocketAction'] = TCPSocketAction - - -class LifecycleHandler(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - '_exec': (ExecAction,), # noqa: E501 - 'http_get': (HTTPGetAction,), # noqa: E501 - 'tcp_socket': (TCPSocketAction,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - '_exec': 'exec', # noqa: E501 - 'http_get': 'httpGet', # noqa: E501 - 'tcp_socket': 'tcpSocket', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """LifecycleHandler - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _exec (ExecAction): [optional] # noqa: E501 - http_get (HTTPGetAction): [optional] # noqa: E501 - tcp_socket (TCPSocketAction): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """LifecycleHandler - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _exec (ExecAction): [optional] # noqa: E501 - http_get (HTTPGetAction): [optional] # noqa: E501 - tcp_socket (TCPSocketAction): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/list_meta.py b/sdks/python/client/argo_workflows/model/list_meta.py deleted file mode 100644 index c2d2f917b35d..000000000000 --- a/sdks/python/client/argo_workflows/model/list_meta.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ListMeta(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - '_continue': (str,), # noqa: E501 - 'remaining_item_count': (int,), # noqa: E501 - 'resource_version': (str,), # noqa: E501 - 'self_link': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - '_continue': 'continue', # noqa: E501 - 'remaining_item_count': 'remainingItemCount', # noqa: E501 - 'resource_version': 'resourceVersion', # noqa: E501 - 'self_link': 'selfLink', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ListMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _continue (str): continue may be set if the user set a limit on the number of items returned, and indicates that the server has more data available. The value is opaque and may be used to issue another request to the endpoint that served this list to retrieve the next set of available objects. Continuing a consistent list may not be possible if the server configuration has changed or more than a few minutes have passed. The resourceVersion field returned when using this continue value will be identical to the value in the first response, unless you have received this token from an error message.. [optional] # noqa: E501 - remaining_item_count (int): remainingItemCount is the number of subsequent items in the list which are not included in this list response. If the list request contained label or field selectors, then the number of remaining items is unknown and the field will be left unset and omitted during serialization. If the list is complete (either because it is not chunking or because this is the last chunk), then there are no more remaining items and this field will be left unset and omitted during serialization. Servers older than v1.15 do not set this field. The intended use of the remainingItemCount is *estimating* the size of a collection. Clients should not rely on the remainingItemCount to be set or to be exact.. [optional] # noqa: E501 - resource_version (str): String that identifies the server's internal version of this object that can be used by clients to determine when objects have changed. Value must be treated as opaque by clients and passed unmodified back to the server. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency. 
[optional] # noqa: E501 - self_link (str): selfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ListMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _continue (str): continue may be set if the user set a limit on the number of items returned, and indicates that the server has more data available. The value is opaque and may be used to issue another request to the endpoint that served this list to retrieve the next set of available objects. Continuing a consistent list may not be possible if the server configuration has changed or more than a few minutes have passed. The resourceVersion field returned when using this continue value will be identical to the value in the first response, unless you have received this token from an error message.. 
[optional] # noqa: E501 - remaining_item_count (int): remainingItemCount is the number of subsequent items in the list which are not included in this list response. If the list request contained label or field selectors, then the number of remaining items is unknown and the field will be left unset and omitted during serialization. If the list is complete (either because it is not chunking or because this is the last chunk), then there are no more remaining items and this field will be left unset and omitted during serialization. Servers older than v1.15 do not set this field. The intended use of the remainingItemCount is *estimating* the size of a collection. Clients should not rely on the remainingItemCount to be set or to be exact.. [optional] # noqa: E501 - resource_version (str): String that identifies the server's internal version of this object that can be used by clients to determine when objects have changed. Value must be treated as opaque by clients and passed unmodified back to the server. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency. [optional] # noqa: E501 - self_link (str): selfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/local_object_reference.py b/sdks/python/client/argo_workflows/model/local_object_reference.py deleted file mode 100644 index 59420f662f05..000000000000 --- a/sdks/python/client/argo_workflows/model/local_object_reference.py +++ /dev/null @@ -1,255 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class LocalObjectReference(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """LocalObjectReference - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """LocalObjectReference - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/managed_fields_entry.py b/sdks/python/client/argo_workflows/model/managed_fields_entry.py deleted file mode 100644 index 66eb438b6527..000000000000 --- a/sdks/python/client/argo_workflows/model/managed_fields_entry.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ManagedFieldsEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'api_version': (str,), # noqa: E501 - 'fields_type': (str,), # noqa: E501 - 'fields_v1': (bool, date, datetime, dict, float, int, list, str, none_type,), # noqa: E501 - 'manager': (str,), # noqa: E501 - 'operation': (str,), # noqa: E501 - 'subresource': (str,), # noqa: E501 - 'time': (datetime,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'api_version': 'apiVersion', # noqa: E501 - 'fields_type': 'fieldsType', # noqa: E501 - 'fields_v1': 'fieldsV1', # noqa: E501 - 'manager': 'manager', # noqa: E501 - 'operation': 'operation', # noqa: E501 - 'subresource': 'subresource', # noqa: E501 - 'time': 'time', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ManagedFieldsEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the version of this resource that this field set applies to. The format is \"group/version\" just like the top-level APIVersion field. It is necessary to track the version of a field set because it cannot be automatically converted.. [optional] # noqa: E501 - fields_type (str): FieldsType is the discriminator for the different fields format and version. There is currently only one possible value: \"FieldsV1\". [optional] # noqa: E501 - fields_v1 (bool, date, datetime, dict, float, int, list, str, none_type): FieldsV1 stores a set of fields in a data structure like a Trie, in JSON format. Each key is either a '.' 
representing the field itself, and will always map to an empty set, or a string representing a sub-field or item. The string will follow one of these four formats: 'f:', where is the name of a field in a struct, or key in a map 'v:', where is the exact json formatted value of a list item 'i:', where is position of a item in a list 'k:', where is a map of a list item's key fields to their unique values If a key maps to an empty Fields value, the field that key represents is part of the set. The exact format is defined in sigs.k8s.io/structured-merge-diff. [optional] # noqa: E501 - manager (str): Manager is an identifier of the workflow managing these fields.. [optional] # noqa: E501 - operation (str): Operation is the type of operation which lead to this ManagedFieldsEntry being created. The only valid values for this field are 'Apply' and 'Update'.. [optional] # noqa: E501 - subresource (str): Subresource is the name of the subresource used to update that object, or empty string if the object was updated through the main resource. The value of this field is used to distinguish between managers, even if they share the same name. For example, a status update will be distinct from a regular update using the same manager name. Note that the APIVersion field is not related to the Subresource field and it always corresponds to the version of the main resource.. [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ManagedFieldsEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the version of this resource that this field set applies to. The format is \"group/version\" just like the top-level APIVersion field. It is necessary to track the version of a field set because it cannot be automatically converted.. [optional] # noqa: E501 - fields_type (str): FieldsType is the discriminator for the different fields format and version. There is currently only one possible value: \"FieldsV1\". [optional] # noqa: E501 - fields_v1 (bool, date, datetime, dict, float, int, list, str, none_type): FieldsV1 stores a set of fields in a data structure like a Trie, in JSON format. Each key is either a '.' representing the field itself, and will always map to an empty set, or a string representing a sub-field or item. 
The string will follow one of these four formats: 'f:', where is the name of a field in a struct, or key in a map 'v:', where is the exact json formatted value of a list item 'i:', where is position of a item in a list 'k:', where is a map of a list item's key fields to their unique values If a key maps to an empty Fields value, the field that key represents is part of the set. The exact format is defined in sigs.k8s.io/structured-merge-diff. [optional] # noqa: E501 - manager (str): Manager is an identifier of the workflow managing these fields.. [optional] # noqa: E501 - operation (str): Operation is the type of operation which lead to this ManagedFieldsEntry being created. The only valid values for this field are 'Apply' and 'Update'.. [optional] # noqa: E501 - subresource (str): Subresource is the name of the subresource used to update that object, or empty string if the object was updated through the main resource. The value of this field is used to distinguish between managers, even if they share the same name. For example, a status update will be distinct from a regular update using the same manager name. Note that the APIVersion field is not related to the Subresource field and it always corresponds to the version of the main resource.. [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/nfs_volume_source.py b/sdks/python/client/argo_workflows/model/nfs_volume_source.py deleted file mode 100644 index b7a0a13f5ce5..000000000000 --- a/sdks/python/client/argo_workflows/model/nfs_volume_source.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class NFSVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'path': (str,), # noqa: E501 - 'server': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'path': 'path', # noqa: E501 - 'server': 'server', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, path, server, *args, **kwargs): # noqa: E501 - """NFSVolumeSource - a model defined in OpenAPI - - Args: - path (str): Path that is exported by the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs - server (str): Server is the hostname or IP address of the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): ReadOnly here will force the NFS export to be mounted with read-only permissions. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - self.server = server - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, path, server, *args, **kwargs): # noqa: E501 - """NFSVolumeSource - a model defined in OpenAPI - - Args: - path (str): Path that is exported by the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs - server (str): Server is the hostname or IP address of the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): ReadOnly here will force the NFS export to be mounted with read-only permissions. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - self.server = server - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/node_affinity.py b/sdks/python/client/argo_workflows/model/node_affinity.py deleted file mode 100644 index e79ba63a0623..000000000000 --- a/sdks/python/client/argo_workflows/model/node_affinity.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.node_selector import NodeSelector - from argo_workflows.model.preferred_scheduling_term import PreferredSchedulingTerm - globals()['NodeSelector'] = NodeSelector - globals()['PreferredSchedulingTerm'] = PreferredSchedulingTerm - - -class NodeAffinity(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'preferred_during_scheduling_ignored_during_execution': ([PreferredSchedulingTerm],), # noqa: E501 - 'required_during_scheduling_ignored_during_execution': (NodeSelector,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'preferred_during_scheduling_ignored_during_execution': 'preferredDuringSchedulingIgnoredDuringExecution', # noqa: E501 - 'required_during_scheduling_ignored_during_execution': 'requiredDuringSchedulingIgnoredDuringExecution', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """NodeAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - preferred_during_scheduling_ignored_during_execution ([PreferredSchedulingTerm]): The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred.. 
[optional] # noqa: E501 - required_during_scheduling_ignored_during_execution (NodeSelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """NodeAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - preferred_during_scheduling_ignored_during_execution ([PreferredSchedulingTerm]): The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred.. 
[optional] # noqa: E501 - required_during_scheduling_ignored_during_execution (NodeSelector): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/node_selector.py b/sdks/python/client/argo_workflows/model/node_selector.py deleted file mode 100644 index b51dc7291647..000000000000 --- a/sdks/python/client/argo_workflows/model/node_selector.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.node_selector_term import NodeSelectorTerm - globals()['NodeSelectorTerm'] = NodeSelectorTerm - - -class NodeSelector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'node_selector_terms': ([NodeSelectorTerm],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'node_selector_terms': 'nodeSelectorTerms', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, node_selector_terms, *args, **kwargs): # noqa: E501 - """NodeSelector - a model defined in OpenAPI - - Args: - node_selector_terms ([NodeSelectorTerm]): Required. A list of node selector terms. The terms are ORed. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.node_selector_terms = node_selector_terms - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, node_selector_terms, *args, **kwargs): # noqa: E501 - """NodeSelector - a model defined in OpenAPI - - Args: - node_selector_terms ([NodeSelectorTerm]): Required. A list of node selector terms. The terms are ORed. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.node_selector_terms = node_selector_terms - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/node_selector_requirement.py b/sdks/python/client/argo_workflows/model/node_selector_requirement.py deleted file mode 100644 index a887c2bcf074..000000000000 --- a/sdks/python/client/argo_workflows/model/node_selector_requirement.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class NodeSelectorRequirement(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - ('operator',): { - 'DOESNOTEXIST': "DoesNotExist", - 'EXISTS': "Exists", - 'GT': "Gt", - 'IN': "In", - 'LT': "Lt", - 'NOTIN': "NotIn", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'operator': (str,), # noqa: E501 - 'values': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'operator': 'operator', # noqa: E501 - 'values': 'values', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, operator, *args, **kwargs): # noqa: E501 - """NodeSelectorRequirement - a model defined in OpenAPI - - Args: - key (str): The label key that the selector applies to. - operator (str): Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. Possible enum values: - `\"DoesNotExist\"` - `\"Exists\"` - `\"Gt\"` - `\"In\"` - `\"Lt\"` - `\"NotIn\"` - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - values ([str]): An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.operator = operator - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, operator, *args, **kwargs): # noqa: E501 - """NodeSelectorRequirement - a model defined in OpenAPI - - Args: - key (str): The label key that the selector applies to. - operator (str): Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. 
Possible enum values: - `\"DoesNotExist\"` - `\"Exists\"` - `\"Gt\"` - `\"In\"` - `\"Lt\"` - `\"NotIn\"` - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - values ([str]): An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - self.operator = operator - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/node_selector_term.py b/sdks/python/client/argo_workflows/model/node_selector_term.py deleted file mode 100644 index 372d6cd83b2d..000000000000 --- a/sdks/python/client/argo_workflows/model/node_selector_term.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.node_selector_requirement import NodeSelectorRequirement - globals()['NodeSelectorRequirement'] = NodeSelectorRequirement - - -class NodeSelectorTerm(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'match_expressions': ([NodeSelectorRequirement],), # noqa: E501 - 'match_fields': ([NodeSelectorRequirement],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'match_expressions': 'matchExpressions', # noqa: E501 - 'match_fields': 'matchFields', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """NodeSelectorTerm - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - match_expressions ([NodeSelectorRequirement]): A list of node selector requirements by node's labels.. [optional] # noqa: E501 - match_fields ([NodeSelectorRequirement]): A list of node selector requirements by node's fields.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """NodeSelectorTerm - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - match_expressions ([NodeSelectorRequirement]): A list of node selector requirements by node's labels.. [optional] # noqa: E501 - match_fields ([NodeSelectorRequirement]): A list of node selector requirements by node's fields.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/object_field_selector.py b/sdks/python/client/argo_workflows/model/object_field_selector.py deleted file mode 100644 index e4c4e599c708..000000000000 --- a/sdks/python/client/argo_workflows/model/object_field_selector.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ObjectFieldSelector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'field_path': (str,), # noqa: E501 - 'api_version': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'field_path': 'fieldPath', # noqa: E501 - 'api_version': 'apiVersion', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, field_path, *args, **kwargs): # noqa: E501 - """ObjectFieldSelector - a model defined in OpenAPI - - Args: - field_path (str): Path of the field to select in the specified API version. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): Version of the schema the FieldPath is written in terms of, defaults to \"v1\".. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.field_path = field_path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, field_path, *args, **kwargs): # noqa: E501 - """ObjectFieldSelector - a model defined in OpenAPI - - Args: - field_path (str): Path of the field to select in the specified API version. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): Version of the schema the FieldPath is written in terms of, defaults to \"v1\".. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.field_path = field_path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/object_meta.py b/sdks/python/client/argo_workflows/model/object_meta.py deleted file mode 100644 index 1bf09981c09e..000000000000 --- a/sdks/python/client/argo_workflows/model/object_meta.py +++ /dev/null @@ -1,323 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.managed_fields_entry import ManagedFieldsEntry - from argo_workflows.model.owner_reference import OwnerReference - globals()['ManagedFieldsEntry'] = ManagedFieldsEntry - globals()['OwnerReference'] = OwnerReference - - -class ObjectMeta(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'annotations': ({str: (str,)},), # noqa: E501 - 'cluster_name': (str,), # noqa: E501 - 'creation_timestamp': (datetime,), # noqa: E501 - 'deletion_grace_period_seconds': (int,), # noqa: E501 - 'deletion_timestamp': (datetime,), # noqa: E501 - 'finalizers': ([str],), # noqa: E501 - 'generate_name': (str,), # noqa: E501 - 'generation': (int,), # noqa: E501 - 'labels': ({str: (str,)},), # noqa: E501 - 'managed_fields': ([ManagedFieldsEntry],), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'owner_references': ([OwnerReference],), # noqa: E501 - 'resource_version': (str,), # noqa: E501 - 'self_link': (str,), # noqa: E501 - 'uid': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'annotations': 'annotations', # noqa: E501 - 'cluster_name': 'clusterName', # noqa: E501 - 'creation_timestamp': 'creationTimestamp', # noqa: E501 - 'deletion_grace_period_seconds': 'deletionGracePeriodSeconds', # noqa: E501 - 'deletion_timestamp': 'deletionTimestamp', # noqa: E501 - 'finalizers': 'finalizers', # noqa: E501 - 'generate_name': 'generateName', # noqa: E501 - 'generation': 'generation', # noqa: E501 - 'labels': 'labels', # noqa: E501 - 'managed_fields': 'managedFields', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'owner_references': 'ownerReferences', # noqa: E501 - 'resource_version': 'resourceVersion', # noqa: E501 - 'self_link': 'selfLink', # noqa: E501 - 'uid': 'uid', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ObjectMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations. [optional] # noqa: E501 - cluster_name (str): The name of the cluster which the object belongs to. This is used to distinguish resources with same name and namespace in different clusters. This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request.. [optional] # noqa: E501 - creation_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - deletion_grace_period_seconds (int): Number of seconds allowed for this object to gracefully terminate before it will be removed from the system. Only set when deletionTimestamp is also set. May only be shortened. Read-only.. [optional] # noqa: E501 - deletion_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - finalizers ([str]): Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed. Finalizers may be processed and removed in any order. Order is NOT enforced because it introduces significant risk of stuck finalizers. finalizers is a shared field, any actor with permission can reorder it. If the finalizer list is processed in order, then this can lead to a situation in which the component responsible for the first finalizer in the list is waiting for a signal (field value, external system, or other) produced by a component responsible for a finalizer later in the list, resulting in a deadlock. Without enforced ordering finalizers are free to order amongst themselves and are not vulnerable to ordering changes in the list.. [optional] # noqa: E501 - generate_name (str): GenerateName is an optional prefix, used by the server, to generate a unique name ONLY IF the Name field has not been provided. If this field is used, the name returned to the client will be different than the name passed. This value will also be combined with a unique suffix. 
The provided value has the same validation rules as the Name field, and may be truncated by the length of the suffix required to make the value unique on the server. If this field is specified and the generated name exists, the server will NOT return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout indicating a unique name could not be found in the time allotted, and the client should retry (optionally after the time indicated in the Retry-After header). Applied only if Name is not specified. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#idempotency. [optional] # noqa: E501 - generation (int): A sequence number representing a specific generation of the desired state. Populated by the system. Read-only.. [optional] # noqa: E501 - labels ({str: (str,)}): Map of string keys and values that can be used to organize and categorize (scope and select) objects. May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels. [optional] # noqa: E501 - managed_fields ([ManagedFieldsEntry]): ManagedFields maps workflow-id and version to the set of fields that are managed by that workflow. This is mostly for internal housekeeping, and users typically shouldn't need to set or understand this field. A workflow can be the user's name, a controller's name, or the name of a specific apply path like \"ci-cd\". The set of fields is always in the version that the workflow used when modifying the object.. [optional] # noqa: E501 - name (str): Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names. 
[optional] # noqa: E501 - namespace (str): Namespace defines the space within which each name must be unique. An empty namespace is equivalent to the \"default\" namespace, but \"default\" is the canonical representation. Not all objects are required to be scoped to a namespace - the value of this field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/namespaces. [optional] # noqa: E501 - owner_references ([OwnerReference]): List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. There cannot be more than one managing controller.. [optional] # noqa: E501 - resource_version (str): An opaque value that represents the internal version of this object that can be used by clients to determine when objects have changed. May be used for optimistic concurrency, change detection, and the watch operation on a resource or set of resources. Clients must treat these values as opaque and passed unmodified back to the server. They may only be valid for a particular resource or set of resources. Populated by the system. Read-only. Value must be treated as opaque by clients and . More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency. [optional] # noqa: E501 - self_link (str): SelfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release.. [optional] # noqa: E501 - uid (str): UID is the unique in time and space value for this object. It is typically generated by the server on successful creation of a resource and is not allowed to change on PUT operations. Populated by the system. 
Read-only. More info: http://kubernetes.io/docs/user-guide/identifiers#uids. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ObjectMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - annotations ({str: (str,)}): Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations. [optional] # noqa: E501 - cluster_name (str): The name of the cluster which the object belongs to. This is used to distinguish resources with same name and namespace in different clusters. This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request.. [optional] # noqa: E501 - creation_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - deletion_grace_period_seconds (int): Number of seconds allowed for this object to gracefully terminate before it will be removed from the system. Only set when deletionTimestamp is also set. May only be shortened. Read-only.. [optional] # noqa: E501 - deletion_timestamp (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - finalizers ([str]): Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed. Finalizers may be processed and removed in any order. Order is NOT enforced because it introduces significant risk of stuck finalizers. finalizers is a shared field, any actor with permission can reorder it. If the finalizer list is processed in order, then this can lead to a situation in which the component responsible for the first finalizer in the list is waiting for a signal (field value, external system, or other) produced by a component responsible for a finalizer later in the list, resulting in a deadlock. Without enforced ordering finalizers are free to order amongst themselves and are not vulnerable to ordering changes in the list.. [optional] # noqa: E501 - generate_name (str): GenerateName is an optional prefix, used by the server, to generate a unique name ONLY IF the Name field has not been provided. If this field is used, the name returned to the client will be different than the name passed. This value will also be combined with a unique suffix. 
The provided value has the same validation rules as the Name field, and may be truncated by the length of the suffix required to make the value unique on the server. If this field is specified and the generated name exists, the server will NOT return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout indicating a unique name could not be found in the time allotted, and the client should retry (optionally after the time indicated in the Retry-After header). Applied only if Name is not specified. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#idempotency. [optional] # noqa: E501 - generation (int): A sequence number representing a specific generation of the desired state. Populated by the system. Read-only.. [optional] # noqa: E501 - labels ({str: (str,)}): Map of string keys and values that can be used to organize and categorize (scope and select) objects. May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels. [optional] # noqa: E501 - managed_fields ([ManagedFieldsEntry]): ManagedFields maps workflow-id and version to the set of fields that are managed by that workflow. This is mostly for internal housekeeping, and users typically shouldn't need to set or understand this field. A workflow can be the user's name, a controller's name, or the name of a specific apply path like \"ci-cd\". The set of fields is always in the version that the workflow used when modifying the object.. [optional] # noqa: E501 - name (str): Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names. 
[optional] # noqa: E501 - namespace (str): Namespace defines the space within which each name must be unique. An empty namespace is equivalent to the \"default\" namespace, but \"default\" is the canonical representation. Not all objects are required to be scoped to a namespace - the value of this field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/namespaces. [optional] # noqa: E501 - owner_references ([OwnerReference]): List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. There cannot be more than one managing controller.. [optional] # noqa: E501 - resource_version (str): An opaque value that represents the internal version of this object that can be used by clients to determine when objects have changed. May be used for optimistic concurrency, change detection, and the watch operation on a resource or set of resources. Clients must treat these values as opaque and passed unmodified back to the server. They may only be valid for a particular resource or set of resources. Populated by the system. Read-only. Value must be treated as opaque by clients and . More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency. [optional] # noqa: E501 - self_link (str): SelfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release.. [optional] # noqa: E501 - uid (str): UID is the unique in time and space value for this object. It is typically generated by the server on successful creation of a resource and is not allowed to change on PUT operations. Populated by the system. 
Read-only. More info: http://kubernetes.io/docs/user-guide/identifiers#uids. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/object_reference.py b/sdks/python/client/argo_workflows/model/object_reference.py deleted file mode 100644 index d4f48dbd1f4a..000000000000 --- a/sdks/python/client/argo_workflows/model/object_reference.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ObjectReference(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'api_version': (str,), # noqa: E501 - 'field_path': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'resource_version': (str,), # noqa: E501 - 'uid': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'api_version': 'apiVersion', # noqa: E501 - 'field_path': 'fieldPath', # noqa: E501 - 'kind': 'kind', # noqa: E501 - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'resource_version': 'resourceVersion', # noqa: E501 - 'uid': 'uid', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ObjectReference - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): API version of the referent.. [optional] # noqa: E501 - field_path (str): If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: \"spec.containers{name}\" (where \"name\" refers to the name of the container that triggered the event) or if no container name is specified \"spec.containers[2]\" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object.. [optional] # noqa: E501 - kind (str): Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. 
[optional] # noqa: E501 - namespace (str): Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/. [optional] # noqa: E501 - resource_version (str): Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency. [optional] # noqa: E501 - uid (str): UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ObjectReference - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): API version of the referent.. 
[optional] # noqa: E501 - field_path (str): If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: \"spec.containers{name}\" (where \"name\" refers to the name of the container that triggered the event) or if no container name is specified \"spec.containers[2]\" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object.. [optional] # noqa: E501 - kind (str): Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - namespace (str): Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/. [optional] # noqa: E501 - resource_version (str): Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency. [optional] # noqa: E501 - uid (str): UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/owner_reference.py b/sdks/python/client/argo_workflows/model/owner_reference.py deleted file mode 100644 index 2d4370a53023..000000000000 --- a/sdks/python/client/argo_workflows/model/owner_reference.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class OwnerReference(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'uid': (str,), # noqa: E501 - 'block_owner_deletion': (bool,), # noqa: E501 - 'controller': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - 'name': 'name', # noqa: E501 - 'uid': 'uid', # noqa: E501 - 'block_owner_deletion': 'blockOwnerDeletion', # noqa: E501 - 'controller': 'controller', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, api_version, kind, name, uid, *args, **kwargs): # noqa: E501 - """OwnerReference - a model defined in OpenAPI - - Args: - api_version (str): API version of the referent. - kind (str): Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds - name (str): Name of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#names - uid (str): UID of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#uids - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - block_owner_deletion (bool): If true, AND if the owner has the \"foregroundDeletion\" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. Defaults to false. To set this field, a user needs \"delete\" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned.. [optional] # noqa: E501 - controller (bool): If true, this reference points to the managing controller.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.api_version = api_version - self.kind = kind - self.name = name - self.uid = uid - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, api_version, kind, name, uid, *args, **kwargs): # noqa: E501 - """OwnerReference - a model defined in OpenAPI - - Args: - api_version (str): API version of the referent. - kind (str): Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds - name (str): Name of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#names - uid (str): UID of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#uids - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - block_owner_deletion (bool): If true, AND if the owner has the \"foregroundDeletion\" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. Defaults to false. To set this field, a user needs \"delete\" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned.. [optional] # noqa: E501 - controller (bool): If true, this reference points to the managing controller.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.api_version = api_version - self.kind = kind - self.name = name - self.uid = uid - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim.py deleted file mode 100644 index ea59e936e081..000000000000 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim.py +++ /dev/null @@ -1,281 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.object_meta import ObjectMeta - from argo_workflows.model.persistent_volume_claim_spec import PersistentVolumeClaimSpec - from argo_workflows.model.persistent_volume_claim_status import PersistentVolumeClaimStatus - globals()['ObjectMeta'] = ObjectMeta - globals()['PersistentVolumeClaimSpec'] = PersistentVolumeClaimSpec - globals()['PersistentVolumeClaimStatus'] = PersistentVolumeClaimStatus - - -class PersistentVolumeClaim(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'api_version': (str,), # noqa: E501 - 'kind': (str,), # noqa: E501 - 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (PersistentVolumeClaimSpec,), # noqa: E501 - 'status': (PersistentVolumeClaimStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'api_version': 'apiVersion', # noqa: E501 - 'kind': 'kind', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'status': 'status', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaim - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - metadata (ObjectMeta): [optional] # noqa: E501 - spec (PersistentVolumeClaimSpec): [optional] # noqa: E501 - status (PersistentVolumeClaimStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaim - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501 - kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. 
[optional] # noqa: E501 - metadata (ObjectMeta): [optional] # noqa: E501 - spec (PersistentVolumeClaimSpec): [optional] # noqa: E501 - status (PersistentVolumeClaimStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py deleted file mode 100644 index 10c3ee2b90d0..000000000000 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class PersistentVolumeClaimCondition(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - ('type',): { - 'FILESYSTEMRESIZEPENDING': "FileSystemResizePending", - 'RESIZING': "Resizing", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'status': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - 'last_probe_time': (datetime,), # noqa: E501 - 'last_transition_time': (datetime,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'reason': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'status': 'status', # noqa: E501 - 'type': 'type', # noqa: E501 - 'last_probe_time': 'lastProbeTime', # noqa: E501 - 'last_transition_time': 'lastTransitionTime', # noqa: E501 - 'message': 'message', # noqa: E501 - 'reason': 'reason', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, status, type, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimCondition - a model defined in OpenAPI - - Args: - status (str): - type (str): Possible enum values: - `\"FileSystemResizePending\"` - controller resize is finished and a file system resize is pending on node - `\"Resizing\"` - a user trigger resize of pvc has been started - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - last_probe_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - last_transition_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): Human-readable message indicating details about last transition.. [optional] # noqa: E501 - reason (str): Unique, this should be a short, machine understandable string that gives the reason for condition's last transition. 
If it reports \"ResizeStarted\" that means the underlying persistent volume is being resized.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.status = status - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, status, type, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimCondition - a model defined in OpenAPI - - Args: - status (str): - type (str): Possible enum values: - `\"FileSystemResizePending\"` - controller resize is finished and a file system resize is pending on node - `\"Resizing\"` - a user trigger resize of pvc has been started - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - last_probe_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - last_transition_time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. [optional] # noqa: E501 - message (str): Human-readable message indicating details about last transition.. [optional] # noqa: E501 - reason (str): Unique, this should be a short, machine understandable string that gives the reason for condition's last transition. If it reports \"ResizeStarted\" that means the underlying persistent volume is being resized.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.status = status - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py deleted file mode 100644 index d610e2305ec4..000000000000 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py +++ /dev/null @@ -1,293 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.label_selector import LabelSelector - from argo_workflows.model.resource_requirements import ResourceRequirements - from argo_workflows.model.typed_local_object_reference import TypedLocalObjectReference - globals()['LabelSelector'] = LabelSelector - globals()['ResourceRequirements'] = ResourceRequirements - globals()['TypedLocalObjectReference'] = TypedLocalObjectReference - - -class PersistentVolumeClaimSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'access_modes': ([str],), # noqa: E501 - 'data_source': (TypedLocalObjectReference,), # noqa: E501 - 'data_source_ref': (TypedLocalObjectReference,), # noqa: E501 - 'resources': (ResourceRequirements,), # noqa: E501 - 'selector': (LabelSelector,), # noqa: E501 - 'storage_class_name': (str,), # noqa: E501 - 'volume_mode': (str,), # noqa: E501 - 'volume_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_modes': 'accessModes', # noqa: E501 - 'data_source': 'dataSource', # noqa: E501 - 'data_source_ref': 'dataSourceRef', # noqa: E501 - 'resources': 'resources', # noqa: E501 - 'selector': 'selector', # noqa: E501 - 'storage_class_name': 'storageClassName', # noqa: E501 - 'volume_mode': 'volumeMode', # noqa: E501 - 'volume_name': 'volumeName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - 
"""PersistentVolumeClaimSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_modes ([str]): AccessModes contains the desired access modes the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1. 
[optional] # noqa: E501 - data_source (TypedLocalObjectReference): [optional] # noqa: E501 - data_source_ref (TypedLocalObjectReference): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - selector (LabelSelector): [optional] # noqa: E501 - storage_class_name (str): Name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1. [optional] # noqa: E501 - volume_mode (str): volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.. [optional] # noqa: E501 - volume_name (str): VolumeName is the binding reference to the PersistentVolume backing this claim.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_modes ([str]): AccessModes contains the desired access modes the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1. 
[optional] # noqa: E501 - data_source (TypedLocalObjectReference): [optional] # noqa: E501 - data_source_ref (TypedLocalObjectReference): [optional] # noqa: E501 - resources (ResourceRequirements): [optional] # noqa: E501 - selector (LabelSelector): [optional] # noqa: E501 - storage_class_name (str): Name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1. [optional] # noqa: E501 - volume_mode (str): volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.. [optional] # noqa: E501 - volume_name (str): VolumeName is the binding reference to the PersistentVolume backing this claim.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py deleted file mode 100644 index e314a90e0ff4..000000000000 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py +++ /dev/null @@ -1,286 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.persistent_volume_claim_condition import PersistentVolumeClaimCondition - globals()['PersistentVolumeClaimCondition'] = PersistentVolumeClaimCondition - - -class PersistentVolumeClaimStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - ('phase',): { - 'BOUND': "Bound", - 'LOST': "Lost", - 'PENDING': "Pending", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'access_modes': ([str],), # noqa: E501 - 'allocated_resources': ({str: (str,)},), # noqa: E501 - 'capacity': ({str: (str,)},), # noqa: E501 - 'conditions': ([PersistentVolumeClaimCondition],), # noqa: E501 - 'phase': (str,), # noqa: E501 - 'resize_status': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'access_modes': 'accessModes', # noqa: E501 - 'allocated_resources': 'allocatedResources', # noqa: E501 - 'capacity': 'capacity', # noqa: E501 - 'conditions': 'conditions', # noqa: E501 - 'phase': 'phase', # noqa: E501 - 'resize_status': 'resizeStatus', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_modes ([str]): AccessModes contains the actual access modes the volume backing the PVC has. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1. [optional] # noqa: E501 - allocated_resources ({str: (str,)}): The storage resource within AllocatedResources tracks the capacity allocated to a PVC. It may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.. [optional] # noqa: E501 - capacity ({str: (str,)}): Represents the actual resources of the underlying volume.. [optional] # noqa: E501 - conditions ([PersistentVolumeClaimCondition]): Current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'ResizeStarted'.. [optional] # noqa: E501 - phase (str): Phase represents the current phase of PersistentVolumeClaim. Possible enum values: - `\"Bound\"` used for PersistentVolumeClaims that are bound - `\"Lost\"` used for PersistentVolumeClaims that lost their underlying PersistentVolume. The claim was bound to a PersistentVolume and this volume does not exist any longer and all data on it was lost. 
- `\"Pending\"` used for PersistentVolumeClaims that are not yet bound. [optional] # noqa: E501 - resize_status (str): ResizeStatus stores status of resize operation. ResizeStatus is not set by default but when expansion is complete resizeStatus is set to empty string by resize controller or kubelet. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - access_modes ([str]): AccessModes contains the actual access modes the volume backing the PVC has. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1. 
[optional] # noqa: E501 - allocated_resources ({str: (str,)}): The storage resource within AllocatedResources tracks the capacity allocated to a PVC. It may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.. [optional] # noqa: E501 - capacity ({str: (str,)}): Represents the actual resources of the underlying volume.. [optional] # noqa: E501 - conditions ([PersistentVolumeClaimCondition]): Current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'ResizeStarted'.. [optional] # noqa: E501 - phase (str): Phase represents the current phase of PersistentVolumeClaim. Possible enum values: - `\"Bound\"` used for PersistentVolumeClaims that are bound - `\"Lost\"` used for PersistentVolumeClaims that lost their underlying PersistentVolume. The claim was bound to a PersistentVolume and this volume does not exist any longer and all data on it was lost. - `\"Pending\"` used for PersistentVolumeClaims that are not yet bound. [optional] # noqa: E501 - resize_status (str): ResizeStatus stores status of resize operation. ResizeStatus is not set by default but when expansion is complete resizeStatus is set to empty string by resize controller or kubelet. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_template.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_template.py deleted file mode 100644 index 6bb9b943775c..000000000000 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_template.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.object_meta import ObjectMeta - from argo_workflows.model.persistent_volume_claim_spec import PersistentVolumeClaimSpec - globals()['ObjectMeta'] = ObjectMeta - globals()['PersistentVolumeClaimSpec'] = PersistentVolumeClaimSpec - - -class PersistentVolumeClaimTemplate(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'spec': (PersistentVolumeClaimSpec,), # noqa: E501 - 'metadata': (ObjectMeta,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'spec': 'spec', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, spec, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimTemplate - a model defined in OpenAPI - - Args: - spec (PersistentVolumeClaimSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, spec, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimTemplate - a model defined in OpenAPI - - Args: - spec (PersistentVolumeClaimSpec): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - metadata (ObjectMeta): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.spec = spec - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_volume_source.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_volume_source.py deleted file mode 100644 index 062d79312018..000000000000 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_volume_source.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class PersistentVolumeClaimVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'claim_name': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'claim_name': 'claimName', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, claim_name, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimVolumeSource - a model defined in OpenAPI - - Args: - claim_name (str): ClaimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): Will force the ReadOnly setting in VolumeMounts. Default false.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.claim_name = claim_name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, claim_name, *args, **kwargs): # noqa: E501 - """PersistentVolumeClaimVolumeSource - a model defined in OpenAPI - - Args: - claim_name (str): ClaimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - read_only (bool): Will force the ReadOnly setting in VolumeMounts. Default false.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.claim_name = claim_name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/photon_persistent_disk_volume_source.py b/sdks/python/client/argo_workflows/model/photon_persistent_disk_volume_source.py deleted file mode 100644 index 44081c16b657..000000000000 --- a/sdks/python/client/argo_workflows/model/photon_persistent_disk_volume_source.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class PhotonPersistentDiskVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'pd_id': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'pd_id': 'pdID', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, pd_id, *args, **kwargs): # noqa: E501 - """PhotonPersistentDiskVolumeSource - a model defined in OpenAPI - - Args: - pd_id (str): ID that identifies Photon Controller persistent disk - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.pd_id = pd_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, pd_id, *args, **kwargs): # noqa: E501 - """PhotonPersistentDiskVolumeSource - a model defined in OpenAPI - - Args: - pd_id (str): ID that identifies Photon Controller persistent disk - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.pd_id = pd_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pod_affinity.py b/sdks/python/client/argo_workflows/model/pod_affinity.py deleted file mode 100644 index 067d603b2008..000000000000 --- a/sdks/python/client/argo_workflows/model/pod_affinity.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.pod_affinity_term import PodAffinityTerm - from argo_workflows.model.weighted_pod_affinity_term import WeightedPodAffinityTerm - globals()['PodAffinityTerm'] = PodAffinityTerm - globals()['WeightedPodAffinityTerm'] = WeightedPodAffinityTerm - - -class PodAffinity(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'preferred_during_scheduling_ignored_during_execution': ([WeightedPodAffinityTerm],), # noqa: E501 - 'required_during_scheduling_ignored_during_execution': ([PodAffinityTerm],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'preferred_during_scheduling_ignored_during_execution': 'preferredDuringSchedulingIgnoredDuringExecution', # noqa: E501 - 'required_during_scheduling_ignored_during_execution': 'requiredDuringSchedulingIgnoredDuringExecution', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PodAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - preferred_during_scheduling_ignored_during_execution ([WeightedPodAffinityTerm]): The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.. [optional] # noqa: E501 - required_during_scheduling_ignored_during_execution ([PodAffinityTerm]): If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PodAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - preferred_during_scheduling_ignored_during_execution ([WeightedPodAffinityTerm]): The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.. [optional] # noqa: E501 - required_during_scheduling_ignored_during_execution ([PodAffinityTerm]): If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. 
due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pod_affinity_term.py b/sdks/python/client/argo_workflows/model/pod_affinity_term.py deleted file mode 100644 index 50f340e70553..000000000000 --- a/sdks/python/client/argo_workflows/model/pod_affinity_term.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.label_selector import LabelSelector - globals()['LabelSelector'] = LabelSelector - - -class PodAffinityTerm(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'topology_key': (str,), # noqa: E501 - 'label_selector': (LabelSelector,), # noqa: E501 - 'namespace_selector': (LabelSelector,), # noqa: E501 - 'namespaces': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'topology_key': 'topologyKey', # noqa: E501 - 'label_selector': 'labelSelector', # noqa: E501 - 'namespace_selector': 'namespaceSelector', # noqa: E501 - 'namespaces': 'namespaces', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, topology_key, *args, **kwargs): # noqa: E501 - """PodAffinityTerm - a model defined in OpenAPI - - Args: - topology_key (str): This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - label_selector (LabelSelector): [optional] # noqa: E501 - namespace_selector (LabelSelector): [optional] # noqa: E501 - namespaces ([str]): namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\". 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.topology_key = topology_key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, topology_key, *args, **kwargs): # noqa: E501 - """PodAffinityTerm - a model defined in OpenAPI - - Args: - topology_key (str): This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - label_selector (LabelSelector): [optional] # noqa: E501 - namespace_selector (LabelSelector): [optional] # noqa: E501 - namespaces ([str]): namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\". 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.topology_key = topology_key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pod_anti_affinity.py b/sdks/python/client/argo_workflows/model/pod_anti_affinity.py deleted file mode 100644 index 5d44b3b27200..000000000000 --- a/sdks/python/client/argo_workflows/model/pod_anti_affinity.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.pod_affinity_term import PodAffinityTerm - from argo_workflows.model.weighted_pod_affinity_term import WeightedPodAffinityTerm - globals()['PodAffinityTerm'] = PodAffinityTerm - globals()['WeightedPodAffinityTerm'] = WeightedPodAffinityTerm - - -class PodAntiAffinity(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'preferred_during_scheduling_ignored_during_execution': ([WeightedPodAffinityTerm],), # noqa: E501 - 'required_during_scheduling_ignored_during_execution': ([PodAffinityTerm],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'preferred_during_scheduling_ignored_during_execution': 'preferredDuringSchedulingIgnoredDuringExecution', # noqa: E501 - 'required_during_scheduling_ignored_during_execution': 'requiredDuringSchedulingIgnoredDuringExecution', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PodAntiAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - preferred_during_scheduling_ignored_during_execution ([WeightedPodAffinityTerm]): The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.. [optional] # noqa: E501 - required_during_scheduling_ignored_during_execution ([PodAffinityTerm]): If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. 
due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PodAntiAffinity - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - preferred_during_scheduling_ignored_during_execution ([WeightedPodAffinityTerm]): The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.. 
[optional] # noqa: E501 - required_during_scheduling_ignored_during_execution ([PodAffinityTerm]): If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pod_dns_config.py b/sdks/python/client/argo_workflows/model/pod_dns_config.py deleted file mode 100644 index 078055305e19..000000000000 --- a/sdks/python/client/argo_workflows/model/pod_dns_config.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.pod_dns_config_option import PodDNSConfigOption - globals()['PodDNSConfigOption'] = PodDNSConfigOption - - -class PodDNSConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'nameservers': ([str],), # noqa: E501 - 'options': ([PodDNSConfigOption],), # noqa: E501 - 'searches': ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'nameservers': 'nameservers', # noqa: E501 - 'options': 'options', # noqa: E501 - 'searches': 'searches', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PodDNSConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - nameservers ([str]): A list of DNS name server IP addresses. This will be appended to the base nameservers generated from DNSPolicy. Duplicated nameservers will be removed.. [optional] # noqa: E501 - options ([PodDNSConfigOption]): A list of DNS resolver options. This will be merged with the base options generated from DNSPolicy. Duplicated entries will be removed. Resolution options given in Options will override those that appear in the base DNSPolicy.. [optional] # noqa: E501 - searches ([str]): A list of DNS search domains for host-name lookup. This will be appended to the base search paths generated from DNSPolicy. Duplicated search paths will be removed.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PodDNSConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - nameservers ([str]): A list of DNS name server IP addresses. This will be appended to the base nameservers generated from DNSPolicy. Duplicated nameservers will be removed.. [optional] # noqa: E501 - options ([PodDNSConfigOption]): A list of DNS resolver options. This will be merged with the base options generated from DNSPolicy. Duplicated entries will be removed. Resolution options given in Options will override those that appear in the base DNSPolicy.. [optional] # noqa: E501 - searches ([str]): A list of DNS search domains for host-name lookup. This will be appended to the base search paths generated from DNSPolicy. Duplicated search paths will be removed.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pod_dns_config_option.py b/sdks/python/client/argo_workflows/model/pod_dns_config_option.py deleted file mode 100644 index 154ae32daec1..000000000000 --- a/sdks/python/client/argo_workflows/model/pod_dns_config_option.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class PodDNSConfigOption(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PodDNSConfigOption - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Required.. [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PodDNSConfigOption - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Required.. 
[optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/pod_security_context.py b/sdks/python/client/argo_workflows/model/pod_security_context.py deleted file mode 100644 index 0ed02f420fcd..000000000000 --- a/sdks/python/client/argo_workflows/model/pod_security_context.py +++ /dev/null @@ -1,303 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.se_linux_options import SELinuxOptions - from argo_workflows.model.seccomp_profile import SeccompProfile - from argo_workflows.model.sysctl import Sysctl - from argo_workflows.model.windows_security_context_options import WindowsSecurityContextOptions - globals()['SELinuxOptions'] = SELinuxOptions - globals()['SeccompProfile'] = SeccompProfile - globals()['Sysctl'] = Sysctl - globals()['WindowsSecurityContextOptions'] = WindowsSecurityContextOptions - - -class PodSecurityContext(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'fs_group': (int,), # noqa: E501 - 'fs_group_change_policy': (str,), # noqa: E501 - 'run_as_group': (int,), # noqa: E501 - 'run_as_non_root': (bool,), # noqa: E501 - 'run_as_user': (int,), # noqa: E501 - 'se_linux_options': (SELinuxOptions,), # noqa: E501 - 'seccomp_profile': (SeccompProfile,), # noqa: E501 - 'supplemental_groups': ([int],), # noqa: E501 - 'sysctls': ([Sysctl],), # noqa: E501 - 'windows_options': (WindowsSecurityContextOptions,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'fs_group': 'fsGroup', # noqa: E501 - 'fs_group_change_policy': 'fsGroupChangePolicy', # noqa: E501 - 'run_as_group': 'runAsGroup', # noqa: E501 - 'run_as_non_root': 'runAsNonRoot', # noqa: E501 - 'run_as_user': 'runAsUser', # noqa: E501 - 'se_linux_options': 'seLinuxOptions', # noqa: E501 - 'seccomp_profile': 'seccompProfile', # noqa: E501 - 'supplemental_groups': 'supplementalGroups', # noqa: E501 - 'sysctls': 'sysctls', # noqa: E501 - 'windows_options': 
'windowsOptions', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PodSecurityContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_group (int): A special supplemental group that applies to all containers in a pod. Some volume types allow the Kubelet to change the ownership of that volume to be owned by the pod: 1. The owning GID will be the FSGroup 2. The setgid bit is set (new files created in the volume will be owned by FSGroup) 3. 
The permission bits are OR'd with rw-rw---- If unset, the Kubelet will not modify the ownership and permissions of any volume. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - fs_group_change_policy (str): fsGroupChangePolicy defines behavior of changing ownership and permission of the volume before being exposed inside Pod. This field will only apply to volume types which support fsGroup based ownership(and permissions). It will have no effect on ephemeral volume types such as: secret, configmaps and emptydir. Valid values are \"OnRootMismatch\" and \"Always\". If not specified, \"Always\" is used. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 - run_as_user (int): The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.. 
[optional] # noqa: E501 - se_linux_options (SELinuxOptions): [optional] # noqa: E501 - seccomp_profile (SeccompProfile): [optional] # noqa: E501 - supplemental_groups ([int]): A list of groups applied to the first process run in each container, in addition to the container's primary GID. If unspecified, no groups will be added to any container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - sysctls ([Sysctl]): Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - windows_options (WindowsSecurityContextOptions): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PodSecurityContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_group (int): A special supplemental group that applies to all containers in a pod. Some volume types allow the Kubelet to change the ownership of that volume to be owned by the pod: 1. 
The owning GID will be the FSGroup 2. The setgid bit is set (new files created in the volume will be owned by FSGroup) 3. The permission bits are OR'd with rw-rw---- If unset, the Kubelet will not modify the ownership and permissions of any volume. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - fs_group_change_policy (str): fsGroupChangePolicy defines behavior of changing ownership and permission of the volume before being exposed inside Pod. This field will only apply to volume types which support fsGroup based ownership(and permissions). It will have no effect on ephemeral volume types such as: secret, configmaps and emptydir. Valid values are \"OnRootMismatch\" and \"Always\". If not specified, \"Always\" is used. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 - run_as_user (int): The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. 
Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - se_linux_options (SELinuxOptions): [optional] # noqa: E501 - seccomp_profile (SeccompProfile): [optional] # noqa: E501 - supplemental_groups ([int]): A list of groups applied to the first process run in each container, in addition to the container's primary GID. If unspecified, no groups will be added to any container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - sysctls ([Sysctl]): Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - windows_options (WindowsSecurityContextOptions): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/portworx_volume_source.py b/sdks/python/client/argo_workflows/model/portworx_volume_source.py deleted file mode 100644 index 01c61e0d232f..000000000000 --- a/sdks/python/client/argo_workflows/model/portworx_volume_source.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class PortworxVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'volume_id': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'volume_id': 'volumeID', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, volume_id, *args, **kwargs): # noqa: E501 - """PortworxVolumeSource - a model defined in OpenAPI - - Args: - volume_id (str): VolumeID uniquely identifies a Portworx volume - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): FSType represents the filesystem type to mount Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_id = volume_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, volume_id, *args, **kwargs): # noqa: E501 - """PortworxVolumeSource - a model defined in OpenAPI - - Args: - volume_id (str): VolumeID uniquely identifies a Portworx volume - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): FSType represents the filesystem type to mount Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_id = volume_id - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/preferred_scheduling_term.py b/sdks/python/client/argo_workflows/model/preferred_scheduling_term.py deleted file mode 100644 index 472a021cd4a4..000000000000 --- a/sdks/python/client/argo_workflows/model/preferred_scheduling_term.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.node_selector_term import NodeSelectorTerm - globals()['NodeSelectorTerm'] = NodeSelectorTerm - - -class PreferredSchedulingTerm(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'preference': (NodeSelectorTerm,), # noqa: E501 - 'weight': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'preference': 'preference', # noqa: E501 - 'weight': 'weight', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, preference, weight, *args, **kwargs): # noqa: E501 - """PreferredSchedulingTerm - a model defined in OpenAPI - - Args: - preference (NodeSelectorTerm): - weight (int): Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.preference = preference - self.weight = weight - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, preference, weight, *args, **kwargs): # noqa: E501 - """PreferredSchedulingTerm - a model defined in OpenAPI - - Args: - preference (NodeSelectorTerm): - weight (int): Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.preference = preference - self.weight = weight - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/probe.py b/sdks/python/client/argo_workflows/model/probe.py deleted file mode 100644 index 0536c3794a88..000000000000 --- a/sdks/python/client/argo_workflows/model/probe.py +++ /dev/null @@ -1,303 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.exec_action import ExecAction - from argo_workflows.model.grpc_action import GRPCAction - from argo_workflows.model.http_get_action import HTTPGetAction - from argo_workflows.model.tcp_socket_action import TCPSocketAction - globals()['ExecAction'] = ExecAction - globals()['GRPCAction'] = GRPCAction - globals()['HTTPGetAction'] = HTTPGetAction - globals()['TCPSocketAction'] = TCPSocketAction - - -class Probe(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - '_exec': (ExecAction,), # noqa: E501 - 'failure_threshold': (int,), # noqa: E501 - 'grpc': (GRPCAction,), # noqa: E501 - 'http_get': (HTTPGetAction,), # noqa: E501 - 'initial_delay_seconds': (int,), # noqa: E501 - 'period_seconds': (int,), # noqa: E501 - 'success_threshold': (int,), # noqa: E501 - 'tcp_socket': (TCPSocketAction,), # noqa: E501 - 'termination_grace_period_seconds': (int,), # noqa: E501 - 'timeout_seconds': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - '_exec': 'exec', # noqa: E501 - 'failure_threshold': 'failureThreshold', # noqa: E501 - 'grpc': 'grpc', # noqa: E501 - 'http_get': 'httpGet', # noqa: E501 - 'initial_delay_seconds': 'initialDelaySeconds', # noqa: E501 - 'period_seconds': 'periodSeconds', # noqa: E501 - 'success_threshold': 'successThreshold', # noqa: E501 - 'tcp_socket': 'tcpSocket', # noqa: E501 - 'termination_grace_period_seconds': 'terminationGracePeriodSeconds', # noqa: E501 - 'timeout_seconds': 'timeoutSeconds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - 
"""Probe - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _exec (ExecAction): [optional] # noqa: E501 - failure_threshold (int): Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1.. [optional] # noqa: E501 - grpc (GRPCAction): [optional] # noqa: E501 - http_get (HTTPGetAction): [optional] # noqa: E501 - initial_delay_seconds (int): Number of seconds after the container has started before liveness probes are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. 
[optional] # noqa: E501 - period_seconds (int): How often (in seconds) to perform the probe. Default to 10 seconds. Minimum value is 1.. [optional] # noqa: E501 - success_threshold (int): Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1. Must be 1 for liveness and startup. Minimum value is 1.. [optional] # noqa: E501 - tcp_socket (TCPSocketAction): [optional] # noqa: E501 - termination_grace_period_seconds (int): Optional duration in seconds the pod needs to terminate gracefully upon probe failure. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. If this value is nil, the pod's terminationGracePeriodSeconds will be used. Otherwise, this value overrides the value provided by the pod spec. Value must be non-negative integer. The value zero indicates stop immediately via the kill signal (no opportunity to shut down). This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset.. [optional] # noqa: E501 - timeout_seconds (int): Number of seconds after which the probe times out. Defaults to 1 second. Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """Probe - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - _exec (ExecAction): [optional] # noqa: E501 - failure_threshold (int): Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1.. [optional] # noqa: E501 - grpc (GRPCAction): [optional] # noqa: E501 - http_get (HTTPGetAction): [optional] # noqa: E501 - initial_delay_seconds (int): Number of seconds after the container has started before liveness probes are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. [optional] # noqa: E501 - period_seconds (int): How often (in seconds) to perform the probe. Default to 10 seconds. Minimum value is 1.. [optional] # noqa: E501 - success_threshold (int): Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1. Must be 1 for liveness and startup. Minimum value is 1.. [optional] # noqa: E501 - tcp_socket (TCPSocketAction): [optional] # noqa: E501 - termination_grace_period_seconds (int): Optional duration in seconds the pod needs to terminate gracefully upon probe failure. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. If this value is nil, the pod's terminationGracePeriodSeconds will be used. Otherwise, this value overrides the value provided by the pod spec. 
Value must be non-negative integer. The value zero indicates stop immediately via the kill signal (no opportunity to shut down). This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset.. [optional] # noqa: E501 - timeout_seconds (int): Number of seconds after which the probe times out. Defaults to 1 second. Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/projected_volume_source.py b/sdks/python/client/argo_workflows/model/projected_volume_source.py deleted file mode 100644 index d0cdb60f6254..000000000000 --- a/sdks/python/client/argo_workflows/model/projected_volume_source.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.volume_projection import VolumeProjection - globals()['VolumeProjection'] = VolumeProjection - - -class ProjectedVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'default_mode': (int,), # noqa: E501 - 'sources': ([VolumeProjection],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'default_mode': 'defaultMode', # noqa: E501 - 'sources': 'sources', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ProjectedVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - sources ([VolumeProjection]): list of volume projections. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ProjectedVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - sources ([VolumeProjection]): list of volume projections. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/quobyte_volume_source.py b/sdks/python/client/argo_workflows/model/quobyte_volume_source.py deleted file mode 100644 index 644e470873ef..000000000000 --- a/sdks/python/client/argo_workflows/model/quobyte_volume_source.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class QuobyteVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'registry': (str,), # noqa: E501 - 'volume': (str,), # noqa: E501 - 'group': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'tenant': (str,), # noqa: E501 - 'user': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'registry': 'registry', # noqa: E501 - 'volume': 'volume', # noqa: E501 - 'group': 'group', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'tenant': 'tenant', # noqa: E501 - 'user': 'user', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, registry, volume, *args, **kwargs): # noqa: E501 - """QuobyteVolumeSource - a model defined in OpenAPI - - Args: - registry (str): Registry represents a single or multiple Quobyte Registry services specified as a string as host:port pair (multiple entries are separated with commas) which acts as the central registry for volumes - volume (str): Volume is a string that references an already created Quobyte volume by name. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - group (str): Group to map volume access to Default is no group. [optional] # noqa: E501 - read_only (bool): ReadOnly here will force the Quobyte volume to be mounted with read-only permissions. Defaults to false.. [optional] # noqa: E501 - tenant (str): Tenant owning the given Quobyte volume in the Backend Used with dynamically provisioned Quobyte volumes, value is set by the plugin. [optional] # noqa: E501 - user (str): User to map volume access to Defaults to serivceaccount user. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.registry = registry - self.volume = volume - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, registry, volume, *args, **kwargs): # noqa: E501 - """QuobyteVolumeSource - a model defined in OpenAPI - - Args: - registry (str): Registry represents a single or multiple Quobyte Registry services specified as a string as host:port pair (multiple entries are separated with commas) which acts as the central registry for volumes - volume (str): Volume is a string that references an already created Quobyte volume by name. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - group (str): Group to map volume access to Default is no group. [optional] # noqa: E501 - read_only (bool): ReadOnly here will force the Quobyte volume to be mounted with read-only permissions. Defaults to false.. [optional] # noqa: E501 - tenant (str): Tenant owning the given Quobyte volume in the Backend Used with dynamically provisioned Quobyte volumes, value is set by the plugin. [optional] # noqa: E501 - user (str): User to map volume access to Defaults to serivceaccount user. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.registry = registry - self.volume = volume - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/rbd_volume_source.py b/sdks/python/client/argo_workflows/model/rbd_volume_source.py deleted file mode 100644 index 16633ebba3e0..000000000000 --- a/sdks/python/client/argo_workflows/model/rbd_volume_source.py +++ /dev/null @@ -1,297 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class RBDVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'image': (str,), # noqa: E501 - 'monitors': ([str],), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'keyring': (str,), # noqa: E501 - 'pool': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'secret_ref': (LocalObjectReference,), # noqa: E501 - 'user': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'image': 'image', # noqa: E501 - 'monitors': 'monitors', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'keyring': 'keyring', # noqa: E501 - 'pool': 'pool', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - 'user': 'user', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, image, monitors, *args, **kwargs): # noqa: E501 - """RBDVolumeSource - a model defined in OpenAPI - - Args: - image (str): The rados image name. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it - monitors ([str]): A collection of Ceph monitors. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd. [optional] # noqa: E501 - keyring (str): Keyring is the path to key ring for RBDUser. Default is /etc/ceph/keyring. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - pool (str): The rados pool name. Default is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - read_only (bool): ReadOnly here will force the ReadOnly setting in VolumeMounts. 
Defaults to false. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - user (str): The rados user name. Default is admin. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.image = image - self.monitors = monitors - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, image, monitors, *args, **kwargs): # noqa: E501 - """RBDVolumeSource - a model defined in OpenAPI - - Args: - image (str): The rados image name. 
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it - monitors ([str]): A collection of Ceph monitors. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd. 
[optional] # noqa: E501 - keyring (str): Keyring is the path to key ring for RBDUser. Default is /etc/ceph/keyring. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - pool (str): The rados pool name. Default is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - read_only (bool): ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - user (str): The rados user name. Default is admin. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.image = image - self.monitors = monitors - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/resource_field_selector.py b/sdks/python/client/argo_workflows/model/resource_field_selector.py deleted file mode 100644 index b00539c01ed6..000000000000 --- a/sdks/python/client/argo_workflows/model/resource_field_selector.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ResourceFieldSelector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'resource': (str,), # noqa: E501 - 'container_name': (str,), # noqa: E501 - 'divisor': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'resource': 'resource', # noqa: E501 - 'container_name': 'containerName', # noqa: E501 - 'divisor': 'divisor', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, resource, *args, **kwargs): # noqa: E501 - """ResourceFieldSelector - a model defined in OpenAPI - - Args: - resource (str): Required: resource to select - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - container_name (str): Container name: required for volumes, optional for env vars. [optional] # noqa: E501 - divisor (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) 
::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.resource = resource - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, resource, *args, **kwargs): # noqa: E501 - """ResourceFieldSelector - a model defined in OpenAPI - - Args: - resource (str): Required: resource to select - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - container_name (str): Container name: required for volumes, optional for env vars. [optional] # noqa: E501 - divisor (str): Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". 
This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.resource = resource - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/resource_requirements.py b/sdks/python/client/argo_workflows/model/resource_requirements.py deleted file mode 100644 index 9ba0fa404963..000000000000 --- a/sdks/python/client/argo_workflows/model/resource_requirements.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ResourceRequirements(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'limits': ({str: (str,)},), # noqa: E501 - 'requests': ({str: (str,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'limits': 'limits', # noqa: E501 - 'requests': 'requests', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ResourceRequirements - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - limits ({str: (str,)}): Limits describes the maximum amount of compute resources allowed. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/. [optional] # noqa: E501 - requests ({str: (str,)}): Requests describes the minimum amount of compute resources required. If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, otherwise to an implementation-defined value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ResourceRequirements - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - limits ({str: (str,)}): Limits describes the maximum amount of compute resources allowed. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/. [optional] # noqa: E501 - requests ({str: (str,)}): Requests describes the minimum amount of compute resources required. If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, otherwise to an implementation-defined value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/scale_io_volume_source.py b/sdks/python/client/argo_workflows/model/scale_io_volume_source.py deleted file mode 100644 index 7dd161b71e6e..000000000000 --- a/sdks/python/client/argo_workflows/model/scale_io_volume_source.py +++ /dev/null @@ -1,307 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class ScaleIOVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'gateway': (str,), # noqa: E501 - 'secret_ref': (LocalObjectReference,), # noqa: E501 - 'system': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'protection_domain': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'ssl_enabled': (bool,), # noqa: E501 - 'storage_mode': (str,), # noqa: E501 - 'storage_pool': (str,), # noqa: E501 - 'volume_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'gateway': 'gateway', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - 'system': 'system', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'protection_domain': 'protectionDomain', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'ssl_enabled': 'sslEnabled', # noqa: E501 - 'storage_mode': 'storageMode', # noqa: E501 - 'storage_pool': 'storagePool', # noqa: E501 - 'volume_name': 'volumeName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, gateway, secret_ref, system, *args, **kwargs): # noqa: E501 - """ScaleIOVolumeSource - a model defined in OpenAPI - - Args: - gateway (str): The host address of the ScaleIO API Gateway. 
- secret_ref (LocalObjectReference): - system (str): The name of the storage system as configured in ScaleIO. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Default is \"xfs\".. [optional] # noqa: E501 - protection_domain (str): The name of the ScaleIO Protection Domain for the configured storage.. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. 
[optional] # noqa: E501 - ssl_enabled (bool): Flag to enable/disable SSL communication with Gateway, default false. [optional] # noqa: E501 - storage_mode (str): Indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. Default is ThinProvisioned.. [optional] # noqa: E501 - storage_pool (str): The ScaleIO Storage Pool associated with the protection domain.. [optional] # noqa: E501 - volume_name (str): The name of a volume already created in the ScaleIO system that is associated with this volume source.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.gateway = gateway - self.secret_ref = secret_ref - self.system = system - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, gateway, secret_ref, system, *args, **kwargs): # noqa: E501 - """ScaleIOVolumeSource - a model defined in OpenAPI - - Args: - gateway (str): The host address of the ScaleIO API Gateway. - secret_ref (LocalObjectReference): - system (str): The name of the storage system as configured in ScaleIO. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Default is \"xfs\".. [optional] # noqa: E501 - protection_domain (str): The name of the ScaleIO Protection Domain for the configured storage.. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - ssl_enabled (bool): Flag to enable/disable SSL communication with Gateway, default false. [optional] # noqa: E501 - storage_mode (str): Indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. Default is ThinProvisioned.. [optional] # noqa: E501 - storage_pool (str): The ScaleIO Storage Pool associated with the protection domain.. [optional] # noqa: E501 - volume_name (str): The name of a volume already created in the ScaleIO system that is associated with this volume source.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.gateway = gateway - self.secret_ref = secret_ref - self.system = system - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/se_linux_options.py b/sdks/python/client/argo_workflows/model/se_linux_options.py deleted file mode 100644 index 222d3f360279..000000000000 --- a/sdks/python/client/argo_workflows/model/se_linux_options.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class SELinuxOptions(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'level': (str,), # noqa: E501 - 'role': (str,), # noqa: E501 - 'type': (str,), # noqa: E501 - 'user': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'level': 'level', # noqa: E501 - 'role': 'role', # noqa: E501 - 'type': 'type', # noqa: E501 - 'user': 'user', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SELinuxOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - level (str): Level is SELinux level label that applies to the container.. [optional] # noqa: E501 - role (str): Role is a SELinux role label that applies to the container.. [optional] # noqa: E501 - type (str): Type is a SELinux type label that applies to the container.. [optional] # noqa: E501 - user (str): User is a SELinux user label that applies to the container.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SELinuxOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - level (str): Level is SELinux level label that applies to the container.. [optional] # noqa: E501 - role (str): Role is a SELinux role label that applies to the container.. [optional] # noqa: E501 - type (str): Type is a SELinux type label that applies to the container.. [optional] # noqa: E501 - user (str): User is a SELinux user label that applies to the container.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/seccomp_profile.py b/sdks/python/client/argo_workflows/model/seccomp_profile.py deleted file mode 100644 index fb07ca0bd919..000000000000 --- a/sdks/python/client/argo_workflows/model/seccomp_profile.py +++ /dev/null @@ -1,270 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class SeccompProfile(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - ('type',): { - 'LOCALHOST': "Localhost", - 'RUNTIMEDEFAULT': "RuntimeDefault", - 'UNCONFINED': "Unconfined", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'type': (str,), # noqa: E501 - 'localhost_profile': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'type': 'type', # noqa: E501 - 'localhost_profile': 'localhostProfile', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, type, *args, **kwargs): # noqa: E501 - """SeccompProfile - a model defined in OpenAPI - - Args: - type (str): type indicates which kind of seccomp profile will be applied. Valid options are: Localhost - a profile defined in a file on the node should be used. RuntimeDefault - the container runtime default profile should be used. Unconfined - no profile should be applied. 
Possible enum values: - `\"Localhost\"` indicates a profile defined in a file on the node should be used. The file's location relative to /seccomp. - `\"RuntimeDefault\"` represents the default container runtime seccomp profile. - `\"Unconfined\"` indicates no seccomp profile is applied (A.K.A. unconfined). - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - localhost_profile (str): localhostProfile indicates a profile defined in a file on the node should be used. The profile must be preconfigured on the node to work. Must be a descending path, relative to the kubelet's configured seccomp profile location. 
Must only be set if type is \"Localhost\".. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, type, *args, **kwargs): # noqa: E501 - """SeccompProfile - a model defined in OpenAPI - - Args: - type (str): type indicates which kind of seccomp profile will be applied. Valid options are: Localhost - a profile defined in a file on the node should be used. RuntimeDefault - the container runtime default profile should be used. Unconfined - no profile should be applied. Possible enum values: - `\"Localhost\"` indicates a profile defined in a file on the node should be used. 
The file's location relative to /seccomp. - `\"RuntimeDefault\"` represents the default container runtime seccomp profile. - `\"Unconfined\"` indicates no seccomp profile is applied (A.K.A. unconfined). - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - localhost_profile (str): localhostProfile indicates a profile defined in a file on the node should be used. The profile must be preconfigured on the node to work. Must be a descending path, relative to the kubelet's configured seccomp profile location. Must only be set if type is \"Localhost\".. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.type = type - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/secret_env_source.py b/sdks/python/client/argo_workflows/model/secret_env_source.py deleted file mode 100644 index ad742cee49f6..000000000000 --- a/sdks/python/client/argo_workflows/model/secret_env_source.py +++ /dev/null @@ -1,259 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class SecretEnvSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'name': (str,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'optional': 'optional', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SecretEnvSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the Secret must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SecretEnvSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the Secret must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/secret_key_selector.py b/sdks/python/client/argo_workflows/model/secret_key_selector.py deleted file mode 100644 index ee91496b4748..000000000000 --- a/sdks/python/client/argo_workflows/model/secret_key_selector.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class SecretKeySelector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'key': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'key': 'key', # noqa: E501 - 'name': 'name', # noqa: E501 - 'optional': 'optional', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, key, *args, **kwargs): # noqa: E501 - """SecretKeySelector - a model defined in OpenAPI - - Args: - key (str): The key of the secret to select from. Must be a valid secret key. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the Secret or its key must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, key, *args, **kwargs): # noqa: E501 - """SecretKeySelector - a model defined in OpenAPI - - Args: - key (str): The key of the secret to select from. Must be a valid secret key. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the Secret or its key must be defined. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.key = key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/secret_projection.py b/sdks/python/client/argo_workflows/model/secret_projection.py deleted file mode 100644 index 823c7c84fa43..000000000000 --- a/sdks/python/client/argo_workflows/model/secret_projection.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.key_to_path import KeyToPath - globals()['KeyToPath'] = KeyToPath - - -class SecretProjection(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'items': ([KeyToPath],), # noqa: E501 - 'name': (str,), # noqa: E501 - 'optional': (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'items': 'items', # noqa: E501 - 'name': 'name', # noqa: E501 - 'optional': 'optional', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SecretProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the Secret or its key must be defined. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SecretProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - name (str): Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names. [optional] # noqa: E501 - optional (bool): Specify whether the Secret or its key must be defined. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/secret_volume_source.py b/sdks/python/client/argo_workflows/model/secret_volume_source.py deleted file mode 100644 index 54e0667cfd0c..000000000000 --- a/sdks/python/client/argo_workflows/model/secret_volume_source.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.key_to_path import KeyToPath - globals()['KeyToPath'] = KeyToPath - - -class SecretVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'default_mode': (int,), # noqa: E501 - 'items': ([KeyToPath],), # noqa: E501 - 'optional': (bool,), # noqa: E501 - 'secret_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'default_mode': 'defaultMode', # noqa: E501 - 'items': 'items', # noqa: E501 - 'optional': 'optional', # noqa: E501 - 'secret_name': 'secretName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SecretVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - optional (bool): Specify whether the Secret or its keys must be defined. [optional] # noqa: E501 - secret_name (str): Name of the secret in the pod's namespace to use. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#secret. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SecretVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - default_mode (int): Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - items ([KeyToPath]): If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. 
If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.. [optional] # noqa: E501 - optional (bool): Specify whether the Secret or its keys must be defined. [optional] # noqa: E501 - secret_name (str): Name of the secret in the pod's namespace to use. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/security_context.py b/sdks/python/client/argo_workflows/model/security_context.py deleted file mode 100644 index 7dbf984223e8..000000000000 --- a/sdks/python/client/argo_workflows/model/security_context.py +++ /dev/null @@ -1,307 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.capabilities import Capabilities - from argo_workflows.model.se_linux_options import SELinuxOptions - from argo_workflows.model.seccomp_profile import SeccompProfile - from argo_workflows.model.windows_security_context_options import WindowsSecurityContextOptions - globals()['Capabilities'] = Capabilities - globals()['SELinuxOptions'] = SELinuxOptions - globals()['SeccompProfile'] = SeccompProfile - globals()['WindowsSecurityContextOptions'] = WindowsSecurityContextOptions - - -class SecurityContext(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'allow_privilege_escalation': (bool,), # noqa: E501 - 'capabilities': (Capabilities,), # noqa: E501 - 'privileged': (bool,), # noqa: E501 - 'proc_mount': (str,), # noqa: E501 - 'read_only_root_filesystem': (bool,), # noqa: E501 - 'run_as_group': (int,), # noqa: E501 - 'run_as_non_root': (bool,), # noqa: E501 - 'run_as_user': (int,), # noqa: E501 - 'se_linux_options': (SELinuxOptions,), # noqa: E501 - 'seccomp_profile': (SeccompProfile,), # noqa: E501 - 'windows_options': (WindowsSecurityContextOptions,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'allow_privilege_escalation': 'allowPrivilegeEscalation', # noqa: E501 - 'capabilities': 'capabilities', # noqa: E501 - 'privileged': 'privileged', # noqa: E501 - 'proc_mount': 'procMount', # noqa: E501 - 'read_only_root_filesystem': 'readOnlyRootFilesystem', # noqa: E501 - 'run_as_group': 'runAsGroup', # noqa: E501 - 'run_as_non_root': 'runAsNonRoot', # noqa: E501 - 'run_as_user': 'runAsUser', # noqa: E501 - 'se_linux_options': 'seLinuxOptions', # noqa: E501 - 'seccomp_profile': 'seccompProfile', # noqa: E501 - 'windows_options': 'windowsOptions', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SecurityContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - allow_privilege_escalation (bool): AllowPrivilegeEscalation controls whether a process can gain more privileges than its parent process. This bool directly controls if the no_new_privs flag will be set on the container process. AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - capabilities (Capabilities): [optional] # noqa: E501 - privileged (bool): Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - proc_mount (str): procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.. 
[optional] # noqa: E501 - read_only_root_filesystem (bool): Whether this container has a read-only root filesystem. Default is false. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 - run_as_user (int): The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.. 
[optional] # noqa: E501 - se_linux_options (SELinuxOptions): [optional] # noqa: E501 - seccomp_profile (SeccompProfile): [optional] # noqa: E501 - windows_options (WindowsSecurityContextOptions): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SecurityContext - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - allow_privilege_escalation (bool): AllowPrivilegeEscalation controls whether a process can gain more privileges than its parent process. This bool directly controls if the no_new_privs flag will be set on the container process. AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - capabilities (Capabilities): [optional] # noqa: E501 - privileged (bool): Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. Note that this field cannot be set when spec.os.name is windows.. 
[optional] # noqa: E501 - proc_mount (str): procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - read_only_root_filesystem (bool): Whether this container has a read-only root filesystem. Default is false. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 - run_as_user (int): The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.. 
[optional] # noqa: E501 - se_linux_options (SELinuxOptions): [optional] # noqa: E501 - seccomp_profile (SeccompProfile): [optional] # noqa: E501 - windows_options (WindowsSecurityContextOptions): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py b/sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py deleted file mode 100644 index b77da79c3e2b..000000000000 --- a/sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor - globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor - - -class SensorCreateSensorRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'create_options': (CreateOptions,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'sensor': (IoArgoprojEventsV1alpha1Sensor,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'create_options': 'createOptions', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'sensor': 'sensor', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SensorCreateSensorRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SensorCreateSensorRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - create_options (CreateOptions): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/sensor_log_entry.py b/sdks/python/client/argo_workflows/model/sensor_log_entry.py deleted file mode 100644 index 58f363a942af..000000000000 --- a/sdks/python/client/argo_workflows/model/sensor_log_entry.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class SensorLogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'dependency_name': (str,), # noqa: E501 - 'event_context': (str,), # noqa: E501 - 'level': (str,), # noqa: E501 - 'msg': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'sensor_name': (str,), # noqa: E501 - 'time': (datetime,), # noqa: E501 - 'trigger_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'dependency_name': 'dependencyName', # noqa: E501 - 'event_context': 'eventContext', # noqa: E501 - 'level': 'level', # noqa: E501 - 'msg': 'msg', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'sensor_name': 'sensorName', # noqa: E501 - 'time': 'time', # noqa: E501 - 'trigger_name': 'triggerName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SensorLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - 
raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dependency_name (str): [optional] # noqa: E501 - event_context (str): [optional] # noqa: E501 - level (str): [optional] # noqa: E501 - msg (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - sensor_name (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - trigger_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SensorLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - dependency_name (str): [optional] # noqa: E501 - event_context (str): [optional] # noqa: E501 - level (str): [optional] # noqa: E501 - msg (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - sensor_name (str): [optional] # noqa: E501 - time (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - trigger_name (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py b/sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py deleted file mode 100644 index 4792d7ec9408..000000000000 --- a/sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor - - -class SensorSensorWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'object': (IoArgoprojEventsV1alpha1Sensor,), # noqa: E501 - 'type': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'object': 'object', # noqa: E501 - 'type': 'type', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SensorSensorWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SensorSensorWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 - type (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py b/sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py deleted file mode 100644 index 4ea9205e3952..000000000000 --- a/sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor - - -class SensorUpdateSensorRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'sensor': (IoArgoprojEventsV1alpha1Sensor,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'sensor': 'sensor', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SensorUpdateSensorRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SensorUpdateSensorRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/service_account_token_projection.py b/sdks/python/client/argo_workflows/model/service_account_token_projection.py deleted file mode 100644 index 2943aa80d2e5..000000000000 --- a/sdks/python/client/argo_workflows/model/service_account_token_projection.py +++ /dev/null @@ -1,269 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ServiceAccountTokenProjection(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'path': (str,), # noqa: E501 - 'audience': (str,), # noqa: E501 - 'expiration_seconds': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'path': 'path', # noqa: E501 - 'audience': 'audience', # noqa: E501 - 'expiration_seconds': 'expirationSeconds', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, path, *args, **kwargs): # noqa: E501 - """ServiceAccountTokenProjection - a model defined in OpenAPI - - Args: - path (str): Path is the path relative to the mount point of the file to project the token into. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - audience (str): Audience is the intended audience of the token. A recipient of a token must identify itself with an identifier specified in the audience of the token, and otherwise should reject the token. The audience defaults to the identifier of the apiserver.. [optional] # noqa: E501 - expiration_seconds (int): ExpirationSeconds is the requested duration of validity of the service account token. As the token approaches expiration, the kubelet volume plugin will proactively rotate the service account token. The kubelet will start trying to rotate the token if the token is older than 80 percent of its time to live or if the token is older than 24 hours.Defaults to 1 hour and must be at least 10 minutes.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, path, *args, **kwargs): # noqa: E501 - """ServiceAccountTokenProjection - a model defined in OpenAPI - - Args: - path (str): Path is the path relative to the mount point of the file to project the token into. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - audience (str): Audience is the intended audience of the token. A recipient of a token must identify itself with an identifier specified in the audience of the token, and otherwise should reject the token. The audience defaults to the identifier of the apiserver.. [optional] # noqa: E501 - expiration_seconds (int): ExpirationSeconds is the requested duration of validity of the service account token. As the token approaches expiration, the kubelet volume plugin will proactively rotate the service account token. The kubelet will start trying to rotate the token if the token is older than 80 percent of its time to live or if the token is older than 24 hours.Defaults to 1 hour and must be at least 10 minutes.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.path = path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/service_port.py b/sdks/python/client/argo_workflows/model/service_port.py deleted file mode 100644 index dd8493e10de9..000000000000 --- a/sdks/python/client/argo_workflows/model/service_port.py +++ /dev/null @@ -1,286 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class ServicePort(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - ('protocol',): { - 'SCTP': "SCTP", - 'TCP': "TCP", - 'UDP': "UDP", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'port': (int,), # noqa: E501 - 'app_protocol': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'node_port': (int,), # noqa: E501 - 'protocol': (str,), # noqa: E501 - 'target_port': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'port': 'port', # noqa: E501 - 'app_protocol': 'appProtocol', # noqa: E501 - 'name': 'name', # noqa: E501 - 'node_port': 'nodePort', # noqa: E501 - 'protocol': 'protocol', # noqa: E501 - 'target_port': 'targetPort', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, port, *args, **kwargs): # noqa: E501 - """ServicePort - a model defined in OpenAPI - - Args: - port (int): The port that will be exposed by this service. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - app_protocol (str): The application protocol for this port. This field follows standard Kubernetes label syntax. Un-prefixed names are reserved for IANA standard service names (as per RFC-6335 and http://www.iana.org/assignments/service-names). Non-standard protocols should use prefixed names such as mycompany.com/my-custom-protocol.. [optional] # noqa: E501 - name (str): The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. When considering the endpoints for a Service, this must match the 'name' field in the EndpointPort. Optional if only one ServicePort is defined on this service.. 
[optional] # noqa: E501 - node_port (int): The port on each node on which this service is exposed when type is NodePort or LoadBalancer. Usually assigned by the system. If a value is specified, in-range, and not in use it will be used, otherwise the operation will fail. If not specified, a port will be allocated if this Service requires one. If this field is specified when creating a Service which does not need it, creation will fail. This field will be wiped when updating a Service to no longer need it (e.g. changing type from NodePort to ClusterIP). More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport. [optional] # noqa: E501 - protocol (str): The IP protocol for this port. Supports \"TCP\", \"UDP\", and \"SCTP\". Default is TCP. Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. - `\"UDP\"` is the UDP protocol.. [optional] # noqa: E501 - target_port (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, port, *args, **kwargs): # noqa: E501 - """ServicePort - a model defined in OpenAPI - - Args: - port (int): The port that will be exposed by this service. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - app_protocol (str): The application protocol for this port. This field follows standard Kubernetes label syntax. Un-prefixed names are reserved for IANA standard service names (as per RFC-6335 and http://www.iana.org/assignments/service-names). Non-standard protocols should use prefixed names such as mycompany.com/my-custom-protocol.. [optional] # noqa: E501 - name (str): The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. When considering the endpoints for a Service, this must match the 'name' field in the EndpointPort. Optional if only one ServicePort is defined on this service.. [optional] # noqa: E501 - node_port (int): The port on each node on which this service is exposed when type is NodePort or LoadBalancer. Usually assigned by the system. If a value is specified, in-range, and not in use it will be used, otherwise the operation will fail. If not specified, a port will be allocated if this Service requires one. If this field is specified when creating a Service which does not need it, creation will fail. This field will be wiped when updating a Service to no longer need it (e.g. changing type from NodePort to ClusterIP). More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport. 
[optional] # noqa: E501 - protocol (str): The IP protocol for this port. Supports \"TCP\", \"UDP\", and \"SCTP\". Default is TCP. Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. - `\"UDP\"` is the UDP protocol.. [optional] # noqa: E501 - target_port (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/status_cause.py b/sdks/python/client/argo_workflows/model/status_cause.py deleted file mode 100644 index a90e092af2d6..000000000000 --- a/sdks/python/client/argo_workflows/model/status_cause.py +++ /dev/null @@ -1,263 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class StatusCause(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'field': (str,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'reason': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'field': 'field', # noqa: E501 - 'message': 'message', # noqa: E501 - 'reason': 'reason', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StatusCause - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - field (str): The field of the resource that has caused this error, as named by its JSON serialization. May include dot and postfix notation for nested attributes. Arrays are zero-indexed. Fields may appear more than once in an array of causes due to fields having multiple errors. Optional. Examples: \"name\" - the field \"name\" on the current resource \"items[0].name\" - the field \"name\" on the first array entry in \"items\". [optional] # noqa: E501 - message (str): A human-readable description of the cause of the error. This field may be presented as-is to a reader.. [optional] # noqa: E501 - reason (str): A machine-readable description of the cause of the error. If this value is empty there is no information available.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StatusCause - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - field (str): The field of the resource that has caused this error, as named by its JSON serialization. May include dot and postfix notation for nested attributes. Arrays are zero-indexed. Fields may appear more than once in an array of causes due to fields having multiple errors. Optional. Examples: \"name\" - the field \"name\" on the current resource \"items[0].name\" - the field \"name\" on the first array entry in \"items\". [optional] # noqa: E501 - message (str): A human-readable description of the cause of the error. This field may be presented as-is to a reader.. [optional] # noqa: E501 - reason (str): A machine-readable description of the cause of the error. If this value is empty there is no information available.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/storage_os_volume_source.py b/sdks/python/client/argo_workflows/model/storage_os_volume_source.py deleted file mode 100644 index febb659bc55a..000000000000 --- a/sdks/python/client/argo_workflows/model/storage_os_volume_source.py +++ /dev/null @@ -1,277 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.local_object_reference import LocalObjectReference - globals()['LocalObjectReference'] = LocalObjectReference - - -class StorageOSVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'fs_type': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'secret_ref': (LocalObjectReference,), # noqa: E501 - 'volume_name': (str,), # noqa: E501 - 'volume_namespace': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'fs_type': 'fsType', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'secret_ref': 'secretRef', # noqa: E501 - 'volume_name': 'volumeName', # noqa: E501 - 'volume_namespace': 'volumeNamespace', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StorageOSVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - volume_name (str): VolumeName is the human-readable name of the StorageOS volume. Volume names are only unique within a namespace.. [optional] # noqa: E501 - volume_namespace (str): VolumeNamespace specifies the scope of the volume within StorageOS. If no namespace is specified then the Pod's namespace will be used. This allows the Kubernetes name scoping to be mirrored within StorageOS for tighter integration. Set VolumeName to any name to override the default behaviour. Set to \"default\" if you are not using namespaces within StorageOS. Namespaces that do not pre-exist within StorageOS will be created.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StorageOSVolumeSource - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - read_only (bool): Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.. [optional] # noqa: E501 - secret_ref (LocalObjectReference): [optional] # noqa: E501 - volume_name (str): VolumeName is the human-readable name of the StorageOS volume. Volume names are only unique within a namespace.. [optional] # noqa: E501 - volume_namespace (str): VolumeNamespace specifies the scope of the volume within StorageOS. If no namespace is specified then the Pod's namespace will be used. This allows the Kubernetes name scoping to be mirrored within StorageOS for tighter integration. Set VolumeName to any name to override the default behaviour. Set to \"default\" if you are not using namespaces within StorageOS. Namespaces that do not pre-exist within StorageOS will be created.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_event.py b/sdks/python/client/argo_workflows/model/stream_result_of_event.py deleted file mode 100644 index 0811ea086d92..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_event.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.event import Event - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - globals()['Event'] = Event - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - - -class StreamResultOfEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (Event,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (Event): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (Event): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_eventsource_event_source_watch_event.py b/sdks/python/client/argo_workflows/model/stream_result_of_eventsource_event_source_watch_event.py deleted file mode 100644 index 1e3e069c7054..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_eventsource_event_source_watch_event.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.eventsource_event_source_watch_event import EventsourceEventSourceWatchEvent - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - globals()['EventsourceEventSourceWatchEvent'] = EventsourceEventSourceWatchEvent - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - - -class StreamResultOfEventsourceEventSourceWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (EventsourceEventSourceWatchEvent,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfEventsourceEventSourceWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (EventsourceEventSourceWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfEventsourceEventSourceWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (EventsourceEventSourceWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_eventsource_log_entry.py b/sdks/python/client/argo_workflows/model/stream_result_of_eventsource_log_entry.py deleted file mode 100644 index 7be4da080e80..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_eventsource_log_entry.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.eventsource_log_entry import EventsourceLogEntry - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - globals()['EventsourceLogEntry'] = EventsourceLogEntry - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - - -class StreamResultOfEventsourceLogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (EventsourceLogEntry,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfEventsourceLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (EventsourceLogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfEventsourceLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (EventsourceLogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py b/sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py deleted file mode 100644 index c507a9b1cbb3..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - from argo_workflows.model.io_argoproj_workflow_v1alpha1_log_entry import IoArgoprojWorkflowV1alpha1LogEntry - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - globals()['IoArgoprojWorkflowV1alpha1LogEntry'] = IoArgoprojWorkflowV1alpha1LogEntry - - -class StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (IoArgoprojWorkflowV1alpha1LogEntry,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (IoArgoprojWorkflowV1alpha1LogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (IoArgoprojWorkflowV1alpha1LogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py b/sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py deleted file mode 100644 index 6f00ad8ded69..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_watch_event import IoArgoprojWorkflowV1alpha1WorkflowWatchEvent - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - globals()['IoArgoprojWorkflowV1alpha1WorkflowWatchEvent'] = IoArgoprojWorkflowV1alpha1WorkflowWatchEvent - - -class StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (IoArgoprojWorkflowV1alpha1WorkflowWatchEvent,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (IoArgoprojWorkflowV1alpha1WorkflowWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (IoArgoprojWorkflowV1alpha1WorkflowWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_sensor_log_entry.py b/sdks/python/client/argo_workflows/model/stream_result_of_sensor_log_entry.py deleted file mode 100644 index 864a57a48d2d..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_sensor_log_entry.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - from argo_workflows.model.sensor_log_entry import SensorLogEntry - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - globals()['SensorLogEntry'] = SensorLogEntry - - -class StreamResultOfSensorLogEntry(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (SensorLogEntry,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfSensorLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (SensorLogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfSensorLogEntry - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (SensorLogEntry): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/stream_result_of_sensor_sensor_watch_event.py b/sdks/python/client/argo_workflows/model/stream_result_of_sensor_sensor_watch_event.py deleted file mode 100644 index 859f0c5dcdba..000000000000 --- a/sdks/python/client/argo_workflows/model/stream_result_of_sensor_sensor_watch_event.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError - from argo_workflows.model.sensor_sensor_watch_event import SensorSensorWatchEvent - globals()['GrpcGatewayRuntimeStreamError'] = GrpcGatewayRuntimeStreamError - globals()['SensorSensorWatchEvent'] = SensorSensorWatchEvent - - -class StreamResultOfSensorSensorWatchEvent(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. 
These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'error': (GrpcGatewayRuntimeStreamError,), # noqa: E501 - 'result': (SensorSensorWatchEvent,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'error': 'error', # noqa: E501 - 'result': 'result', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """StreamResultOfSensorSensorWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (SensorSensorWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """StreamResultOfSensorSensorWatchEvent - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - error (GrpcGatewayRuntimeStreamError): [optional] # noqa: E501 - result (SensorSensorWatchEvent): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/sysctl.py b/sdks/python/client/argo_workflows/model/sysctl.py deleted file mode 100644 index 4a323894fce1..000000000000 --- a/sdks/python/client/argo_workflows/model/sysctl.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class Sysctl(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - 'name': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, value, *args, **kwargs): # noqa: E501 - """Sysctl - a model defined in OpenAPI - - Args: - name (str): Name of a property to set - value (str): Value of a property to set - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, value, *args, **kwargs): # noqa: E501 - """Sysctl - a model defined in OpenAPI - - Args: - name (str): Name of a property to set - value (str): Value of a property to set - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.value = value - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/tcp_socket_action.py b/sdks/python/client/argo_workflows/model/tcp_socket_action.py deleted file mode 100644 index 8bbabec27c7b..000000000000 --- a/sdks/python/client/argo_workflows/model/tcp_socket_action.py +++ /dev/null @@ -1,265 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class TCPSocketAction(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'port': (str,), # noqa: E501 - 'host': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'port': 'port', # noqa: E501 - 'host': 'host', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, port, *args, **kwargs): # noqa: E501 - """TCPSocketAction - a model defined in OpenAPI - - Args: - port (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - host (str): Optional: Host name to connect to, defaults to the pod IP.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, port, *args, **kwargs): # noqa: E501 - """TCPSocketAction - a model defined in OpenAPI - - Args: - port (str): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - host (str): Optional: Host name to connect to, defaults to the pod IP.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.port = port - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/toleration.py b/sdks/python/client/argo_workflows/model/toleration.py deleted file mode 100644 index 6fc5258bed13..000000000000 --- a/sdks/python/client/argo_workflows/model/toleration.py +++ /dev/null @@ -1,280 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class Toleration(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - ('effect',): { - 'NOEXECUTE': "NoExecute", - 'NOSCHEDULE': "NoSchedule", - 'PREFERNOSCHEDULE': "PreferNoSchedule", - }, - ('operator',): { - 'EQUAL': "Equal", - 'EXISTS': "Exists", - }, - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'effect': (str,), # noqa: E501 - 'key': (str,), # noqa: E501 - 'operator': (str,), # noqa: E501 - 'toleration_seconds': (int,), # noqa: E501 - 'value': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'effect': 'effect', # noqa: E501 - 'key': 'key', # noqa: E501 - 'operator': 'operator', # noqa: E501 - 'toleration_seconds': 'tolerationSeconds', # noqa: E501 - 'value': 'value', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """Toleration - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - effect (str): Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. Possible enum values: - `\"NoExecute\"` Evict any already-running pods that do not tolerate the taint. Currently enforced by NodeController. - `\"NoSchedule\"` Do not allow new pods to schedule onto the node unless they tolerate the taint, but allow all pods submitted to Kubelet without going through the scheduler to start, and allow all already-running pods to continue running. Enforced by the scheduler. - `\"PreferNoSchedule\"` Like TaintEffectNoSchedule, but the scheduler tries not to schedule new pods onto the node, rather than prohibiting new pods from scheduling onto the node entirely. Enforced by the scheduler.. [optional] # noqa: E501 - key (str): Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys.. 
[optional] # noqa: E501 - operator (str): Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. Possible enum values: - `\"Equal\"` - `\"Exists\"`. [optional] # noqa: E501 - toleration_seconds (int): TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system.. [optional] # noqa: E501 - value (str): Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """Toleration - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - effect (str): Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. 
Possible enum values: - `\"NoExecute\"` Evict any already-running pods that do not tolerate the taint. Currently enforced by NodeController. - `\"NoSchedule\"` Do not allow new pods to schedule onto the node unless they tolerate the taint, but allow all pods submitted to Kubelet without going through the scheduler to start, and allow all already-running pods to continue running. Enforced by the scheduler. - `\"PreferNoSchedule\"` Like TaintEffectNoSchedule, but the scheduler tries not to schedule new pods onto the node, rather than prohibiting new pods from scheduling onto the node entirely. Enforced by the scheduler.. [optional] # noqa: E501 - key (str): Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys.. [optional] # noqa: E501 - operator (str): Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. Possible enum values: - `\"Equal\"` - `\"Exists\"`. [optional] # noqa: E501 - toleration_seconds (int): TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system.. [optional] # noqa: E501 - value (str): Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/typed_local_object_reference.py b/sdks/python/client/argo_workflows/model/typed_local_object_reference.py deleted file mode 100644 index 81763f889085..000000000000 --- a/sdks/python/client/argo_workflows/model/typed_local_object_reference.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class TypedLocalObjectReference(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'kind': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'api_group': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'kind': 'kind', # noqa: E501 - 'name': 'name', # noqa: E501 - 'api_group': 'apiGroup', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, kind, name, *args, **kwargs): # noqa: E501 - """TypedLocalObjectReference - a model defined in OpenAPI - - Args: - kind (str): Kind is the type of resource being referenced - name (str): Name is the name of resource being referenced - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_group (str): APIGroup is the group for the resource being referenced. If APIGroup is not specified, the specified Kind must be in the core API group. For any other third-party types, APIGroup is required.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.kind = kind - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, kind, name, *args, **kwargs): # noqa: E501 - """TypedLocalObjectReference - a model defined in OpenAPI - - Args: - kind (str): Kind is the type of resource being referenced - name (str): Name is the name of resource being referenced - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - api_group (str): APIGroup is the group for the resource being referenced. If APIGroup is not specified, the specified Kind must be in the core API group. For any other third-party types, APIGroup is required.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.kind = kind - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/volume.py b/sdks/python/client/argo_workflows/model/volume.py deleted file mode 100644 index ad0df855203c..000000000000 --- a/sdks/python/client/argo_workflows/model/volume.py +++ /dev/null @@ -1,439 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.aws_elastic_block_store_volume_source import AWSElasticBlockStoreVolumeSource - from argo_workflows.model.azure_disk_volume_source import AzureDiskVolumeSource - from argo_workflows.model.azure_file_volume_source import AzureFileVolumeSource - from argo_workflows.model.ceph_fs_volume_source import CephFSVolumeSource - from argo_workflows.model.cinder_volume_source import CinderVolumeSource - from argo_workflows.model.config_map_volume_source import ConfigMapVolumeSource - from argo_workflows.model.csi_volume_source import CSIVolumeSource - from argo_workflows.model.downward_api_volume_source import DownwardAPIVolumeSource - from argo_workflows.model.empty_dir_volume_source import EmptyDirVolumeSource 
- from argo_workflows.model.ephemeral_volume_source import EphemeralVolumeSource - from argo_workflows.model.fc_volume_source import FCVolumeSource - from argo_workflows.model.flex_volume_source import FlexVolumeSource - from argo_workflows.model.flocker_volume_source import FlockerVolumeSource - from argo_workflows.model.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource - from argo_workflows.model.git_repo_volume_source import GitRepoVolumeSource - from argo_workflows.model.glusterfs_volume_source import GlusterfsVolumeSource - from argo_workflows.model.host_path_volume_source import HostPathVolumeSource - from argo_workflows.model.iscsi_volume_source import ISCSIVolumeSource - from argo_workflows.model.nfs_volume_source import NFSVolumeSource - from argo_workflows.model.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource - from argo_workflows.model.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource - from argo_workflows.model.portworx_volume_source import PortworxVolumeSource - from argo_workflows.model.projected_volume_source import ProjectedVolumeSource - from argo_workflows.model.quobyte_volume_source import QuobyteVolumeSource - from argo_workflows.model.rbd_volume_source import RBDVolumeSource - from argo_workflows.model.scale_io_volume_source import ScaleIOVolumeSource - from argo_workflows.model.secret_volume_source import SecretVolumeSource - from argo_workflows.model.storage_os_volume_source import StorageOSVolumeSource - from argo_workflows.model.vsphere_virtual_disk_volume_source import VsphereVirtualDiskVolumeSource - globals()['AWSElasticBlockStoreVolumeSource'] = AWSElasticBlockStoreVolumeSource - globals()['AzureDiskVolumeSource'] = AzureDiskVolumeSource - globals()['AzureFileVolumeSource'] = AzureFileVolumeSource - globals()['CSIVolumeSource'] = CSIVolumeSource - globals()['CephFSVolumeSource'] = CephFSVolumeSource - globals()['CinderVolumeSource'] = CinderVolumeSource - 
globals()['ConfigMapVolumeSource'] = ConfigMapVolumeSource - globals()['DownwardAPIVolumeSource'] = DownwardAPIVolumeSource - globals()['EmptyDirVolumeSource'] = EmptyDirVolumeSource - globals()['EphemeralVolumeSource'] = EphemeralVolumeSource - globals()['FCVolumeSource'] = FCVolumeSource - globals()['FlexVolumeSource'] = FlexVolumeSource - globals()['FlockerVolumeSource'] = FlockerVolumeSource - globals()['GCEPersistentDiskVolumeSource'] = GCEPersistentDiskVolumeSource - globals()['GitRepoVolumeSource'] = GitRepoVolumeSource - globals()['GlusterfsVolumeSource'] = GlusterfsVolumeSource - globals()['HostPathVolumeSource'] = HostPathVolumeSource - globals()['ISCSIVolumeSource'] = ISCSIVolumeSource - globals()['NFSVolumeSource'] = NFSVolumeSource - globals()['PersistentVolumeClaimVolumeSource'] = PersistentVolumeClaimVolumeSource - globals()['PhotonPersistentDiskVolumeSource'] = PhotonPersistentDiskVolumeSource - globals()['PortworxVolumeSource'] = PortworxVolumeSource - globals()['ProjectedVolumeSource'] = ProjectedVolumeSource - globals()['QuobyteVolumeSource'] = QuobyteVolumeSource - globals()['RBDVolumeSource'] = RBDVolumeSource - globals()['ScaleIOVolumeSource'] = ScaleIOVolumeSource - globals()['SecretVolumeSource'] = SecretVolumeSource - globals()['StorageOSVolumeSource'] = StorageOSVolumeSource - globals()['VsphereVirtualDiskVolumeSource'] = VsphereVirtualDiskVolumeSource - - -class Volume(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'name': (str,), # noqa: E501 - 'aws_elastic_block_store': (AWSElasticBlockStoreVolumeSource,), # noqa: E501 - 'azure_disk': (AzureDiskVolumeSource,), # noqa: E501 - 'azure_file': (AzureFileVolumeSource,), # noqa: E501 - 'cephfs': (CephFSVolumeSource,), # noqa: E501 - 'cinder': (CinderVolumeSource,), # noqa: E501 - 'config_map': (ConfigMapVolumeSource,), # noqa: E501 - 'csi': (CSIVolumeSource,), # noqa: E501 - 'downward_api': (DownwardAPIVolumeSource,), # noqa: E501 - 'empty_dir': (EmptyDirVolumeSource,), # noqa: E501 - 'ephemeral': (EphemeralVolumeSource,), # noqa: E501 - 'fc': (FCVolumeSource,), # noqa: E501 - 'flex_volume': (FlexVolumeSource,), # noqa: E501 - 'flocker': (FlockerVolumeSource,), # noqa: E501 - 'gce_persistent_disk': (GCEPersistentDiskVolumeSource,), # noqa: E501 - 'git_repo': (GitRepoVolumeSource,), # noqa: E501 - 'glusterfs': (GlusterfsVolumeSource,), # noqa: E501 - 'host_path': (HostPathVolumeSource,), # noqa: E501 - 'iscsi': (ISCSIVolumeSource,), # noqa: E501 - 'nfs': (NFSVolumeSource,), # noqa: E501 - 'persistent_volume_claim': (PersistentVolumeClaimVolumeSource,), # noqa: E501 - 'photon_persistent_disk': (PhotonPersistentDiskVolumeSource,), # noqa: E501 - 'portworx_volume': (PortworxVolumeSource,), # noqa: E501 - 'projected': (ProjectedVolumeSource,), # noqa: E501 - 'quobyte': (QuobyteVolumeSource,), # noqa: E501 - 'rbd': (RBDVolumeSource,), # noqa: E501 - 'scale_io': (ScaleIOVolumeSource,), # noqa: E501 - 'secret': (SecretVolumeSource,), # noqa: E501 - 'storageos': (StorageOSVolumeSource,), # noqa: E501 - 'vsphere_volume': (VsphereVirtualDiskVolumeSource,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'name': 'name', # noqa: E501 - 'aws_elastic_block_store': 'awsElasticBlockStore', # noqa: E501 - 'azure_disk': 'azureDisk', # noqa: E501 - 'azure_file': 'azureFile', # noqa: E501 - 'cephfs': 'cephfs', # noqa: E501 - 'cinder': 'cinder', # noqa: E501 - 
'config_map': 'configMap', # noqa: E501 - 'csi': 'csi', # noqa: E501 - 'downward_api': 'downwardAPI', # noqa: E501 - 'empty_dir': 'emptyDir', # noqa: E501 - 'ephemeral': 'ephemeral', # noqa: E501 - 'fc': 'fc', # noqa: E501 - 'flex_volume': 'flexVolume', # noqa: E501 - 'flocker': 'flocker', # noqa: E501 - 'gce_persistent_disk': 'gcePersistentDisk', # noqa: E501 - 'git_repo': 'gitRepo', # noqa: E501 - 'glusterfs': 'glusterfs', # noqa: E501 - 'host_path': 'hostPath', # noqa: E501 - 'iscsi': 'iscsi', # noqa: E501 - 'nfs': 'nfs', # noqa: E501 - 'persistent_volume_claim': 'persistentVolumeClaim', # noqa: E501 - 'photon_persistent_disk': 'photonPersistentDisk', # noqa: E501 - 'portworx_volume': 'portworxVolume', # noqa: E501 - 'projected': 'projected', # noqa: E501 - 'quobyte': 'quobyte', # noqa: E501 - 'rbd': 'rbd', # noqa: E501 - 'scale_io': 'scaleIO', # noqa: E501 - 'secret': 'secret', # noqa: E501 - 'storageos': 'storageos', # noqa: E501 - 'vsphere_volume': 'vsphereVolume', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """Volume - a model defined in OpenAPI - - Args: - name (str): Volume's name. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - aws_elastic_block_store (AWSElasticBlockStoreVolumeSource): [optional] # noqa: E501 - azure_disk (AzureDiskVolumeSource): [optional] # noqa: E501 - azure_file (AzureFileVolumeSource): [optional] # noqa: E501 - cephfs (CephFSVolumeSource): [optional] # noqa: E501 - cinder (CinderVolumeSource): [optional] # noqa: E501 - config_map (ConfigMapVolumeSource): [optional] # noqa: E501 - csi (CSIVolumeSource): [optional] # noqa: E501 - downward_api (DownwardAPIVolumeSource): [optional] # noqa: E501 - empty_dir (EmptyDirVolumeSource): [optional] # noqa: E501 - ephemeral (EphemeralVolumeSource): [optional] # noqa: E501 - fc (FCVolumeSource): [optional] # noqa: E501 - flex_volume (FlexVolumeSource): [optional] # noqa: E501 - flocker (FlockerVolumeSource): [optional] # noqa: E501 - gce_persistent_disk (GCEPersistentDiskVolumeSource): [optional] # noqa: E501 - git_repo (GitRepoVolumeSource): [optional] # noqa: E501 - glusterfs (GlusterfsVolumeSource): [optional] # noqa: E501 - host_path (HostPathVolumeSource): [optional] # noqa: E501 - iscsi (ISCSIVolumeSource): [optional] # noqa: E501 - nfs (NFSVolumeSource): [optional] # 
noqa: E501 - persistent_volume_claim (PersistentVolumeClaimVolumeSource): [optional] # noqa: E501 - photon_persistent_disk (PhotonPersistentDiskVolumeSource): [optional] # noqa: E501 - portworx_volume (PortworxVolumeSource): [optional] # noqa: E501 - projected (ProjectedVolumeSource): [optional] # noqa: E501 - quobyte (QuobyteVolumeSource): [optional] # noqa: E501 - rbd (RBDVolumeSource): [optional] # noqa: E501 - scale_io (ScaleIOVolumeSource): [optional] # noqa: E501 - secret (SecretVolumeSource): [optional] # noqa: E501 - storageos (StorageOSVolumeSource): [optional] # noqa: E501 - vsphere_volume (VsphereVirtualDiskVolumeSource): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """Volume - a model defined in OpenAPI - - Args: - name (str): Volume's name. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - aws_elastic_block_store (AWSElasticBlockStoreVolumeSource): [optional] # noqa: E501 - azure_disk (AzureDiskVolumeSource): [optional] # noqa: E501 - azure_file (AzureFileVolumeSource): [optional] # noqa: E501 - cephfs (CephFSVolumeSource): [optional] # noqa: E501 - cinder (CinderVolumeSource): [optional] # noqa: E501 - config_map (ConfigMapVolumeSource): [optional] # noqa: E501 - csi (CSIVolumeSource): [optional] # noqa: E501 - downward_api (DownwardAPIVolumeSource): [optional] # noqa: E501 - empty_dir (EmptyDirVolumeSource): [optional] # noqa: E501 - ephemeral (EphemeralVolumeSource): [optional] # noqa: E501 - fc (FCVolumeSource): [optional] # noqa: E501 - flex_volume (FlexVolumeSource): [optional] # noqa: E501 - flocker (FlockerVolumeSource): [optional] # noqa: E501 - gce_persistent_disk (GCEPersistentDiskVolumeSource): [optional] # noqa: E501 - git_repo (GitRepoVolumeSource): [optional] # noqa: E501 - glusterfs (GlusterfsVolumeSource): [optional] # noqa: E501 - host_path (HostPathVolumeSource): [optional] # noqa: E501 - iscsi (ISCSIVolumeSource): [optional] # noqa: E501 - nfs (NFSVolumeSource): [optional] # noqa: E501 - persistent_volume_claim (PersistentVolumeClaimVolumeSource): [optional] # noqa: E501 - photon_persistent_disk (PhotonPersistentDiskVolumeSource): [optional] # noqa: E501 - portworx_volume (PortworxVolumeSource): [optional] # noqa: E501 - projected (ProjectedVolumeSource): [optional] # noqa: E501 - quobyte (QuobyteVolumeSource): [optional] # noqa: E501 - rbd (RBDVolumeSource): [optional] # noqa: E501 - scale_io (ScaleIOVolumeSource): [optional] # noqa: E501 - secret (SecretVolumeSource): [optional] # noqa: E501 - storageos (StorageOSVolumeSource): [optional] # noqa: E501 - vsphere_volume (VsphereVirtualDiskVolumeSource): [optional] # noqa: E501 - """ - - _check_type = 
kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/volume_device.py b/sdks/python/client/argo_workflows/model/volume_device.py deleted file mode 100644 index e00c8ae37746..000000000000 --- a/sdks/python/client/argo_workflows/model/volume_device.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class VolumeDevice(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'device_path': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'device_path': 'devicePath', # noqa: E501 - 'name': 'name', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, device_path, name, *args, **kwargs): # noqa: E501 - """VolumeDevice - a model defined in OpenAPI - - Args: - device_path (str): devicePath is the path inside of the container that the device will be mapped to. - name (str): name must match the name of a persistentVolumeClaim in the pod - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.device_path = device_path - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, device_path, name, *args, **kwargs): # noqa: E501 - """VolumeDevice - a model defined in OpenAPI - - Args: - device_path (str): devicePath is the path inside of the container that the device will be mapped to. - name (str): name must match the name of a persistentVolumeClaim in the pod - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.device_path = device_path - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/volume_mount.py b/sdks/python/client/argo_workflows/model/volume_mount.py deleted file mode 100644 index 05fe9af79de8..000000000000 --- a/sdks/python/client/argo_workflows/model/volume_mount.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class VolumeMount(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'mount_path': (str,), # noqa: E501 - 'name': (str,), # noqa: E501 - 'mount_propagation': (str,), # noqa: E501 - 'read_only': (bool,), # noqa: E501 - 'sub_path': (str,), # noqa: E501 - 'sub_path_expr': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'mount_path': 'mountPath', # noqa: E501 - 'name': 'name', # noqa: E501 - 'mount_propagation': 'mountPropagation', # noqa: E501 - 'read_only': 'readOnly', # noqa: E501 - 'sub_path': 'subPath', # noqa: E501 - 'sub_path_expr': 'subPathExpr', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, mount_path, name, *args, **kwargs): # noqa: E501 - """VolumeMount - a model defined in OpenAPI - - Args: - mount_path (str): Path within the container at which the volume should be mounted. Must not contain ':'. - name (str): This must match the Name of a Volume. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mount_propagation (str): mountPropagation determines how mounts are propagated from the host to container and the other way around. When not set, MountPropagationNone is used. This field is beta in 1.10.. [optional] # noqa: E501 - read_only (bool): Mounted read-only if true, read-write otherwise (false or unspecified). Defaults to false.. [optional] # noqa: E501 - sub_path (str): Path within the volume from which the container's volume should be mounted. Defaults to \"\" (volume's root).. [optional] # noqa: E501 - sub_path_expr (str): Expanded path within the volume from which the container's volume should be mounted. Behaves similarly to SubPath but environment variable references $(VAR_NAME) are expanded using the container's environment. 
Defaults to \"\" (volume's root). SubPathExpr and SubPath are mutually exclusive.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.mount_path = mount_path - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, mount_path, name, *args, **kwargs): # noqa: E501 - """VolumeMount - a model defined in OpenAPI - - Args: - mount_path (str): Path within the container at which the volume should be mounted. Must not contain ':'. - name (str): This must match the Name of a Volume. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - mount_propagation (str): mountPropagation determines how mounts are propagated from the host to container and the other way around. When not set, MountPropagationNone is used. This field is beta in 1.10.. [optional] # noqa: E501 - read_only (bool): Mounted read-only if true, read-write otherwise (false or unspecified). Defaults to false.. [optional] # noqa: E501 - sub_path (str): Path within the volume from which the container's volume should be mounted. Defaults to \"\" (volume's root).. [optional] # noqa: E501 - sub_path_expr (str): Expanded path within the volume from which the container's volume should be mounted. Behaves similarly to SubPath but environment variable references $(VAR_NAME) are expanded using the container's environment. 
Defaults to \"\" (volume's root). SubPathExpr and SubPath are mutually exclusive.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.mount_path = mount_path - self.name = name - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/volume_projection.py b/sdks/python/client/argo_workflows/model/volume_projection.py deleted file mode 100644 index 9b016736848a..000000000000 --- a/sdks/python/client/argo_workflows/model/volume_projection.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.config_map_projection import ConfigMapProjection - from argo_workflows.model.downward_api_projection import DownwardAPIProjection - from argo_workflows.model.secret_projection import SecretProjection - from argo_workflows.model.service_account_token_projection import ServiceAccountTokenProjection - globals()['ConfigMapProjection'] = ConfigMapProjection - globals()['DownwardAPIProjection'] = DownwardAPIProjection - globals()['SecretProjection'] = SecretProjection - globals()['ServiceAccountTokenProjection'] = ServiceAccountTokenProjection - - -class VolumeProjection(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'config_map': (ConfigMapProjection,), # noqa: E501 - 'downward_api': (DownwardAPIProjection,), # noqa: E501 - 'secret': (SecretProjection,), # noqa: E501 - 'service_account_token': (ServiceAccountTokenProjection,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'config_map': 'configMap', # noqa: E501 - 'downward_api': 'downwardAPI', # noqa: E501 - 'secret': 'secret', # noqa: E501 - 'service_account_token': 'serviceAccountToken', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """VolumeProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map (ConfigMapProjection): [optional] # noqa: E501 - downward_api (DownwardAPIProjection): [optional] # noqa: E501 - secret (SecretProjection): [optional] # noqa: E501 - service_account_token (ServiceAccountTokenProjection): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """VolumeProjection - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - config_map (ConfigMapProjection): [optional] # noqa: E501 - downward_api (DownwardAPIProjection): [optional] # noqa: E501 - secret (SecretProjection): [optional] # noqa: E501 - service_account_token (ServiceAccountTokenProjection): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/vsphere_virtual_disk_volume_source.py b/sdks/python/client/argo_workflows/model/vsphere_virtual_disk_volume_source.py deleted file mode 100644 index be95c147cfa3..000000000000 --- a/sdks/python/client/argo_workflows/model/vsphere_virtual_disk_volume_source.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class VsphereVirtualDiskVolumeSource(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'volume_path': (str,), # noqa: E501 - 'fs_type': (str,), # noqa: E501 - 'storage_policy_id': (str,), # noqa: E501 - 'storage_policy_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'volume_path': 'volumePath', # noqa: E501 - 'fs_type': 'fsType', # noqa: E501 - 'storage_policy_id': 'storagePolicyID', # noqa: E501 - 'storage_policy_name': 'storagePolicyName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, volume_path, *args, **kwargs): # noqa: E501 - """VsphereVirtualDiskVolumeSource - a model defined in OpenAPI - - Args: - volume_path (str): Path that identifies vSphere volume vmdk - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - storage_policy_id (str): Storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName.. [optional] # noqa: E501 - storage_policy_name (str): Storage Policy Based Management (SPBM) profile name.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_path = volume_path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, volume_path, *args, **kwargs): # noqa: E501 - """VsphereVirtualDiskVolumeSource - a model defined in OpenAPI - - Args: - volume_path (str): Path that identifies vSphere volume vmdk - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - fs_type (str): Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.. [optional] # noqa: E501 - storage_policy_id (str): Storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName.. [optional] # noqa: E501 - storage_policy_name (str): Storage Policy Based Management (SPBM) profile name.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.volume_path = volume_path - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/weighted_pod_affinity_term.py b/sdks/python/client/argo_workflows/model/weighted_pod_affinity_term.py deleted file mode 100644 index 899e02b9cbd7..000000000000 --- a/sdks/python/client/argo_workflows/model/weighted_pod_affinity_term.py +++ /dev/null @@ -1,273 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.pod_affinity_term import PodAffinityTerm - globals()['PodAffinityTerm'] = PodAffinityTerm - - -class WeightedPodAffinityTerm(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'pod_affinity_term': (PodAffinityTerm,), # noqa: E501 - 'weight': (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'pod_affinity_term': 'podAffinityTerm', # noqa: E501 - 'weight': 'weight', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, pod_affinity_term, weight, *args, **kwargs): # noqa: E501 - """WeightedPodAffinityTerm - a model defined in OpenAPI - - Args: - pod_affinity_term (PodAffinityTerm): - weight (int): weight associated with matching the corresponding podAffinityTerm, in the range 1-100. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.pod_affinity_term = pod_affinity_term - self.weight = weight - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, pod_affinity_term, weight, *args, **kwargs): # noqa: E501 - """WeightedPodAffinityTerm - a model defined in OpenAPI - - Args: - pod_affinity_term (PodAffinityTerm): - weight (int): weight associated with matching the corresponding podAffinityTerm, in the range 1-100. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.pod_affinity_term = pod_affinity_term - self.weight = weight - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/windows_security_context_options.py b/sdks/python/client/argo_workflows/model/windows_security_context_options.py deleted file mode 100644 index 1f7864af7ea6..000000000000 --- a/sdks/python/client/argo_workflows/model/windows_security_context_options.py +++ /dev/null @@ -1,267 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - - -class WindowsSecurityContextOptions(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'gmsa_credential_spec': (str,), # noqa: E501 - 'gmsa_credential_spec_name': (str,), # noqa: E501 - 'host_process': (bool,), # noqa: E501 - 'run_as_user_name': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'gmsa_credential_spec': 'gmsaCredentialSpec', # noqa: E501 - 'gmsa_credential_spec_name': 'gmsaCredentialSpecName', # noqa: E501 - 'host_process': 'hostProcess', # noqa: E501 - 'run_as_user_name': 'runAsUserName', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """WindowsSecurityContextOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - gmsa_credential_spec (str): GMSACredentialSpec is where the GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) inlines the contents of the GMSA credential spec named by the GMSACredentialSpecName field.. [optional] # noqa: E501 - gmsa_credential_spec_name (str): GMSACredentialSpecName is the name of the GMSA credential spec to use.. [optional] # noqa: E501 - host_process (bool): HostProcess determines if a container should be run as a 'Host Process' container. This field is alpha-level and will only be honored by components that enable the WindowsHostProcessContainers feature flag. Setting this field without the feature flag will result in errors when validating the Pod. 
All of a Pod's containers must have the same effective HostProcess value (it is not allowed to have a mix of HostProcess containers and non-HostProcess containers). In addition, if HostProcess is true then HostNetwork must also be set to true.. [optional] # noqa: E501 - run_as_user_name (str): The UserName in Windows to run the entrypoint of the container process. Defaults to the user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """WindowsSecurityContextOptions - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - gmsa_credential_spec (str): GMSACredentialSpec is where the GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) inlines the contents of the GMSA credential spec named by the GMSACredentialSpecName field.. [optional] # noqa: E501 - gmsa_credential_spec_name (str): GMSACredentialSpecName is the name of the GMSA credential spec to use.. [optional] # noqa: E501 - host_process (bool): HostProcess determines if a container should be run as a 'Host Process' container. This field is alpha-level and will only be honored by components that enable the WindowsHostProcessContainers feature flag. Setting this field without the feature flag will result in errors when validating the Pod. All of a Pod's containers must have the same effective HostProcess value (it is not allowed to have a mix of HostProcess containers and non-HostProcess containers). In addition, if HostProcess is true then HostNetwork must also be set to true.. [optional] # noqa: E501 - run_as_user_name (str): The UserName in Windows to run the entrypoint of the container process. Defaults to the user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model_utils.py b/sdks/python/client/argo_workflows/model_utils.py deleted file mode 100644 index 8844880d1362..000000000000 --- a/sdks/python/client/argo_workflows/model_utils.py +++ /dev/null @@ -1,2037 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -from datetime import date, datetime # noqa: F401 -from copy import deepcopy -import inspect -import io -import os -import pprint -import re -import tempfile - -from dateutil.parser import parse - -from argo_workflows.exceptions import ( - ApiKeyError, - ApiAttributeError, - ApiTypeError, - ApiValueError, -) - -none_type = type(None) -file_type = io.IOBase - - -def convert_js_args_to_python_args(fn): - from functools import wraps - @wraps(fn) - def wrapped_init(_self, *args, **kwargs): - """ - An attribute named `self` received from the api will conflicts with the reserved `self` - parameter of a class method. During generation, `self` attributes are mapped - to `_self` in models. Here, we name `_self` instead of `self` to avoid conflicts. - """ - spec_property_naming = kwargs.get('_spec_property_naming', False) - if spec_property_naming: - kwargs = change_keys_js_to_python(kwargs, _self if isinstance(_self, type) else _self.__class__) - return fn(_self, *args, **kwargs) - return wrapped_init - - -class cached_property(object): - # this caches the result of the function call for fn with no inputs - # use this as a decorator on function methods that you want converted - # into cached properties - result_key = '_results' - - def __init__(self, fn): - self._fn = fn - - def __get__(self, instance, cls=None): - if self.result_key in vars(self): - return vars(self)[self.result_key] - else: - result = self._fn() - setattr(self, self.result_key, result) - return result - - -PRIMITIVE_TYPES = (list, float, int, bool, datetime, date, str, file_type) - -def allows_single_value_input(cls): - """ - This function returns True if the input composed schema model or any - descendant model allows a value only input - This is true for cases where oneOf contains items like: - oneOf: - - 
float - - NumberWithValidation - - StringEnum - - ArrayModel - - null - TODO: lru_cache this - """ - if ( - issubclass(cls, ModelSimple) or - cls in PRIMITIVE_TYPES - ): - return True - elif issubclass(cls, ModelComposed): - if not cls._composed_schemas['oneOf']: - return False - return any(allows_single_value_input(c) for c in cls._composed_schemas['oneOf']) - return False - -def composed_model_input_classes(cls): - """ - This function returns a list of the possible models that can be accepted as - inputs. - TODO: lru_cache this - """ - if issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES: - return [cls] - elif issubclass(cls, ModelNormal): - if cls.discriminator is None: - return [cls] - else: - return get_discriminated_classes(cls) - elif issubclass(cls, ModelComposed): - if not cls._composed_schemas['oneOf']: - return [] - if cls.discriminator is None: - input_classes = [] - for c in cls._composed_schemas['oneOf']: - input_classes.extend(composed_model_input_classes(c)) - return input_classes - else: - return get_discriminated_classes(cls) - return [] - - -class OpenApiModel(object): - """The base class for all OpenAPIModels""" - - def set_attribute(self, name, value): - # this is only used to set properties on self - - path_to_item = [] - if self._path_to_item: - path_to_item.extend(self._path_to_item) - path_to_item.append(name) - - if name in self.openapi_types: - required_types_mixed = self.openapi_types[name] - elif self.additional_properties_type is None: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - path_to_item - ) - elif self.additional_properties_type is not None: - required_types_mixed = self.additional_properties_type - - if get_simple_class(name) != str: - error_msg = type_error_message( - var_name=name, - var_value=name, - valid_classes=(str,), - key_type=True - ) - raise ApiTypeError( - error_msg, - path_to_item=path_to_item, - valid_classes=(str,), - key_type=True - ) - - if 
self._check_type: - value = validate_and_convert_types( - value, required_types_mixed, path_to_item, self._spec_property_naming, - self._check_type, configuration=self._configuration) - if (name,) in self.allowed_values: - check_allowed_values( - self.allowed_values, - (name,), - value - ) - if (name,) in self.validations: - check_validations( - self.validations, - (name,), - value, - self._configuration - ) - self.__dict__['_data_store'][name] = value - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other - - def __setattr__(self, attr, value): - """set the value of an attribute using dot notation: `instance.attr = val`""" - self[attr] = value - - def __getattr__(self, attr): - """get the value of an attribute using dot notation: `instance.attr`""" - return self.__getitem__(attr) - - def __copy__(self): - cls = self.__class__ - if self.get("_spec_property_naming", False): - return cls._new_from_openapi_data(**self.__dict__) - else: - return new_cls.__new__(cls, **self.__dict__) - - def __deepcopy__(self, memo): - cls = self.__class__ - - if self.get("_spec_property_naming", False): - new_inst = cls._new_from_openapi_data() - else: - new_inst = cls.__new__(cls) - - for k, v in self.__dict__.items(): - setattr(new_inst, k, deepcopy(v, memo)) - return new_inst - - - def __new__(cls, *args, **kwargs): - # this function uses the discriminator to - # pick a new schema/class to instantiate because a discriminator - # propertyName value was passed in - - if len(args) == 1: - arg = args[0] - if arg is None and is_type_nullable(cls): - # The input data is the 'null' value and the type is nullable. 
- return None - - if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} - oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) - return oneof_instance - - - visited_composed_classes = kwargs.get('_visited_composed_classes', ()) - if ( - cls.discriminator is None or - cls in visited_composed_classes - ): - # Use case 1: this openapi schema (cls) does not have a discriminator - # Use case 2: we have already visited this class before and are sure that we - # want to instantiate it this time. We have visited this class deserializing - # a payload with a discriminator. During that process we traveled through - # this class but did not make an instance of it. Now we are making an - # instance of a composed class which contains cls in it, so this time make an instance of cls. - # - # Here's an example of use case 2: If Animal has a discriminator - # petType and we pass in "Dog", and the class Dog - # allOf includes Animal, we move through Animal - # once using the discriminator, and pick Dog. - # Then in the composed schema dog Dog, we will make an instance of the - # Animal class (because Dal has allOf: Animal) but this time we won't travel - # through Animal's discriminator because we passed in - # _visited_composed_classes = (Animal,) - - return super(OpenApiModel, cls).__new__(cls) - - # Get the name and value of the discriminator property. - # The discriminator name is obtained from the discriminator meta-data - # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] - if discr_propertyname_js in kwargs: - discr_value = kwargs[discr_propertyname_js] - elif discr_propertyname_py in kwargs: - discr_value = kwargs[discr_propertyname_py] - else: - # The input data does not contain the discriminator property. 
- path_to_item = kwargs.get('_path_to_item', ()) - raise ApiValueError( - "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" % - (discr_propertyname_js, path_to_item) - ) - - # Implementation note: the last argument to get_discriminator_class - # is a list of visited classes. get_discriminator_class may recursively - # call itself and update the list of visited classes, and the initial - # value must be an empty list. Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class( - cls, discr_propertyname_py, discr_value, []) - if new_cls is None: - path_to_item = kwargs.get('_path_to_item', ()) - disc_prop_value = kwargs.get( - discr_propertyname_js, kwargs.get(discr_propertyname_py)) - raise ApiValueError( - "Cannot deserialize input data due to invalid discriminator " - "value. The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" % - (discr_propertyname_js, disc_prop_value, path_to_item) - ) - - if new_cls in visited_composed_classes: - # if we are making an instance of a composed schema Descendent - # which allOf includes Ancestor, then Ancestor contains - # a discriminator that includes Descendent. - # So if we make an instance of Descendent, we have to make an - # instance of Ancestor to hold the allOf properties. - # This code detects that use case and makes the instance of Ancestor - # For example: - # When making an instance of Dog, _visited_composed_classes = (Dog,) - # then we make an instance of Animal to include in dog._composed_instances - # so when we are here, cls is Animal - # cls.discriminator != None - # cls not in _visited_composed_classes - # new_cls = Dog - # but we know we know that we already have Dog - # because it is in visited_composed_classes - # so make Animal here - return super(OpenApiModel, cls).__new__(cls) - - # Build a list containing all oneOf and anyOf descendants. 
- oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = ( - cls._composed_schemas.get('oneOf', ()) + - cls._composed_schemas.get('anyOf', ())) - oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs['_visited_composed_classes'] = visited_composed_classes + (cls,) - - if cls._composed_schemas.get('allOf') and oneof_anyof_child: - # Validate that we can make self because when we make the - # new_cls it will not include the allOf validations in self - self_inst = super(OpenApiModel, cls).__new__(cls) - self_inst.__init__(*args, **kwargs) - - if kwargs.get("_spec_property_naming", False): - # when true, implies new is from deserialization - new_inst = new_cls._new_from_openapi_data(*args, **kwargs) - else: - new_inst = new_cls.__new__(new_cls, *args, **kwargs) - new_inst.__init__(*args, **kwargs) - - return new_inst - - - @classmethod - @convert_js_args_to_python_args - def _new_from_openapi_data(cls, *args, **kwargs): - # this function uses the discriminator to - # pick a new schema/class to instantiate because a discriminator - # propertyName value was passed in - - if len(args) == 1: - arg = args[0] - if arg is None and is_type_nullable(cls): - # The input data is the 'null' value and the type is nullable. - return None - - if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} - oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) - return oneof_instance - - - visited_composed_classes = kwargs.get('_visited_composed_classes', ()) - if ( - cls.discriminator is None or - cls in visited_composed_classes - ): - # Use case 1: this openapi schema (cls) does not have a discriminator - # Use case 2: we have already visited this class before and are sure that we - # want to instantiate it this time. We have visited this class deserializing - # a payload with a discriminator. During that process we traveled through - # this class but did not make an instance of it. 
Now we are making an - # instance of a composed class which contains cls in it, so this time make an instance of cls. - # - # Here's an example of use case 2: If Animal has a discriminator - # petType and we pass in "Dog", and the class Dog - # allOf includes Animal, we move through Animal - # once using the discriminator, and pick Dog. - # Then in the composed schema dog Dog, we will make an instance of the - # Animal class (because Dal has allOf: Animal) but this time we won't travel - # through Animal's discriminator because we passed in - # _visited_composed_classes = (Animal,) - - return cls._from_openapi_data(*args, **kwargs) - - # Get the name and value of the discriminator property. - # The discriminator name is obtained from the discriminator meta-data - # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] - if discr_propertyname_js in kwargs: - discr_value = kwargs[discr_propertyname_js] - elif discr_propertyname_py in kwargs: - discr_value = kwargs[discr_propertyname_py] - else: - # The input data does not contain the discriminator property. - path_to_item = kwargs.get('_path_to_item', ()) - raise ApiValueError( - "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" % - (discr_propertyname_js, path_to_item) - ) - - # Implementation note: the last argument to get_discriminator_class - # is a list of visited classes. get_discriminator_class may recursively - # call itself and update the list of visited classes, and the initial - # value must be an empty list. 
Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class( - cls, discr_propertyname_py, discr_value, []) - if new_cls is None: - path_to_item = kwargs.get('_path_to_item', ()) - disc_prop_value = kwargs.get( - discr_propertyname_js, kwargs.get(discr_propertyname_py)) - raise ApiValueError( - "Cannot deserialize input data due to invalid discriminator " - "value. The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" % - (discr_propertyname_js, disc_prop_value, path_to_item) - ) - - if new_cls in visited_composed_classes: - # if we are making an instance of a composed schema Descendent - # which allOf includes Ancestor, then Ancestor contains - # a discriminator that includes Descendent. - # So if we make an instance of Descendent, we have to make an - # instance of Ancestor to hold the allOf properties. - # This code detects that use case and makes the instance of Ancestor - # For example: - # When making an instance of Dog, _visited_composed_classes = (Dog,) - # then we make an instance of Animal to include in dog._composed_instances - # so when we are here, cls is Animal - # cls.discriminator != None - # cls not in _visited_composed_classes - # new_cls = Dog - # but we know we know that we already have Dog - # because it is in visited_composed_classes - # so make Animal here - return cls._from_openapi_data(*args, **kwargs) - - # Build a list containing all oneOf and anyOf descendants. 
- oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = ( - cls._composed_schemas.get('oneOf', ()) + - cls._composed_schemas.get('anyOf', ())) - oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs['_visited_composed_classes'] = visited_composed_classes + (cls,) - - if cls._composed_schemas.get('allOf') and oneof_anyof_child: - # Validate that we can make self because when we make the - # new_cls it will not include the allOf validations in self - self_inst = cls._from_openapi_data(*args, **kwargs) - - - new_inst = new_cls._new_from_openapi_data(*args, **kwargs) - return new_inst - - -class ModelSimple(OpenApiModel): - """the parent class of models whose type != object in their - swagger/openapi""" - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - self.set_attribute(name, value) - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - return self.__dict__['_data_store'].get(name, default) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - if name in self: - return self.get(name) - - raise ApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] - ) - - def __contains__(self, name): - """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" - if name in self.required_properties: - return name in self.__dict__ - - return name in self.__dict__['_data_store'] - - def to_str(self): - """Returns the string representation of the model""" - return str(self.value) - - def __eq__(self, other): - """Returns true if both objects are 
equal""" - if not isinstance(other, self.__class__): - return False - - this_val = self._data_store['value'] - that_val = other._data_store['value'] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - return vals_equal - - -class ModelNormal(OpenApiModel): - """the parent class of models whose type == object in their - swagger/openapi""" - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - self.set_attribute(name, value) - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - return self.__dict__['_data_store'].get(name, default) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - if name in self: - return self.get(name) - - raise ApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] - ) - - def __contains__(self, name): - """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" - if name in self.required_properties: - return name in self.__dict__ - - return name in self.__dict__['_data_store'] - - def to_dict(self): - """Returns the model properties as a dict""" - return model_to_dict(self, serialize=False) - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, self.__class__): - return False - - if not set(self._data_store.keys()) == set(other._data_store.keys()): - return False - for _var_name, this_val in self._data_store.items(): - 
that_val = other._data_store[_var_name] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - if not vals_equal: - return False - return True - - -class ModelComposed(OpenApiModel): - """the parent class of models whose type == object in their - swagger/openapi and have oneOf/allOf/anyOf - - When one sets a property we use var_name_to_model_instances to store the value in - the correct class instances + run any type checking + validation code. - When one gets a property we use var_name_to_model_instances to get the value - from the correct class instances. - This allows multiple composed schemas to contain the same property with additive - constraints on the value. - - _composed_schemas (dict) stores the anyOf/allOf/oneOf classes - key (str): allOf/oneOf/anyOf - value (list): the classes in the XOf definition. - Note: none_type can be included when the openapi document version >= 3.1.0 - _composed_instances (list): stores a list of instances of the composed schemas - defined in _composed_schemas. When properties are accessed in the self instance, - they are returned from the self._data_store or the data stores in the instances - in self._composed_schemas - _var_name_to_model_instances (dict): maps between a variable name on self and - the composed instances (self included) which contain that data - key (str): property name - value (list): list of class instances, self or instances in _composed_instances - which contain the value that the key is referring to. - """ - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - """ - Use cases: - 1. 
additional_properties_type is None (additionalProperties == False in spec) - Check for property presence in self.openapi_types - if not present then throw an error - if present set in self, set attribute - always set on composed schemas - 2. additional_properties_type exists - set attribute on self - always set on composed schemas - """ - if self.additional_properties_type is None: - """ - For an attribute to exist on a composed schema it must: - - fulfill schema_requirements in the self composed schema not considering oneOf/anyOf/allOf schemas AND - - fulfill schema_requirements in each oneOf/anyOf/allOf schemas - - schema_requirements: - For an attribute to exist on a schema it must: - - be present in properties at the schema OR - - have additionalProperties unset (defaults additionalProperties = any type) OR - - have additionalProperties set - """ - if name not in self.openapi_types: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] - ) - # attribute must be set on self and composed instances - self.set_attribute(name, value) - for model_instance in self._composed_instances: - setattr(model_instance, name, value) - if name not in self._var_name_to_model_instances: - # we assigned an additional property - self.__dict__['_var_name_to_model_instances'][name] = self._composed_instances + [self] - return None - - __unset_attribute_value__ = object() - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - # get the attribute from the correct instance - model_instances = self._var_name_to_model_instances.get(name) - values = [] - # A composed model stores self and child (oneof/anyOf/allOf) models under - # self._var_name_to_model_instances. 
- # Any property must exist in self and all model instances - # The value stored in all model instances must be the same - if model_instances: - for model_instance in model_instances: - if name in model_instance._data_store: - v = model_instance._data_store[name] - if v not in values: - values.append(v) - len_values = len(values) - if len_values == 0: - return default - elif len_values == 1: - return values[0] - elif len_values > 1: - raise ApiValueError( - "Values stored for property {0} in {1} differ when looking " - "at self and self's composed instances. All values must be " - "the same".format(name, type(self).__name__), - [e for e in [self._path_to_item, name] if e] - ) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - value = self.get(name, self.__unset_attribute_value__) - if value is self.__unset_attribute_value__: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] - ) - return value - - def __contains__(self, name): - """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" - - if name in self.required_properties: - return name in self.__dict__ - - model_instances = self._var_name_to_model_instances.get( - name, self._additional_properties_model_instances) - - if model_instances: - for model_instance in model_instances: - if name in model_instance._data_store: - return True - - return False - - def to_dict(self): - """Returns the model properties as a dict""" - return model_to_dict(self, serialize=False) - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, self.__class__): - return False - - if not set(self._data_store.keys()) == set(other._data_store.keys()): - return False - for 
_var_name, this_val in self._data_store.items(): - that_val = other._data_store[_var_name] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - if not vals_equal: - return False - return True - - -COERCION_INDEX_BY_TYPE = { - ModelComposed: 0, - ModelNormal: 1, - ModelSimple: 2, - none_type: 3, # The type of 'None'. - list: 4, - dict: 5, - float: 6, - int: 7, - bool: 8, - datetime: 9, - date: 10, - str: 11, - file_type: 12, # 'file_type' is an alias for the built-in 'file' or 'io.IOBase' type. -} - -# these are used to limit what type conversions we try to do -# when we have a valid type already and we want to try converting -# to another type -UPCONVERSION_TYPE_PAIRS = ( - (str, datetime), - (str, date), - (int, float), # A float may be serialized as an integer, e.g. '3' is a valid serialized float. - (list, ModelComposed), - (dict, ModelComposed), - (str, ModelComposed), - (int, ModelComposed), - (float, ModelComposed), - (list, ModelComposed), - (list, ModelNormal), - (dict, ModelNormal), - (str, ModelSimple), - (int, ModelSimple), - (float, ModelSimple), - (list, ModelSimple), -) - -COERCIBLE_TYPE_PAIRS = { - False: ( # client instantiation of a model with client data - # (dict, ModelComposed), - # (list, ModelComposed), - # (dict, ModelNormal), - # (list, ModelNormal), - # (str, ModelSimple), - # (int, ModelSimple), - # (float, ModelSimple), - # (list, ModelSimple), - # (str, int), - # (str, float), - # (str, datetime), - # (str, date), - # (int, str), - # (float, str), - ), - True: ( # server -> client data - (dict, ModelComposed), - (list, ModelComposed), - (dict, ModelNormal), - (list, ModelNormal), - (str, ModelSimple), - (int, ModelSimple), - (float, ModelSimple), - (list, ModelSimple), - # (str, int), - # (str, float), - (str, datetime), - (str, date), - # (int, str), - # (float, str), - (str, file_type) - ), -} - - -def get_simple_class(input_value): - """Returns an input_value's simple 
class that we will use for type checking - Python2: - float and int will return int, where int is the python3 int backport - str and unicode will return str, where str is the python3 str backport - Note: float and int ARE both instances of int backport - Note: str_py2 and unicode_py2 are NOT both instances of str backport - - Args: - input_value (class/class_instance): the item for which we will return - the simple class - """ - if isinstance(input_value, type): - # input_value is a class - return input_value - elif isinstance(input_value, tuple): - return tuple - elif isinstance(input_value, list): - return list - elif isinstance(input_value, dict): - return dict - elif isinstance(input_value, none_type): - return none_type - elif isinstance(input_value, file_type): - return file_type - elif isinstance(input_value, bool): - # this must be higher than the int check because - # isinstance(True, int) == True - return bool - elif isinstance(input_value, int): - return int - elif isinstance(input_value, datetime): - # this must be higher than the date check because - # isinstance(datetime_instance, date) == True - return datetime - elif isinstance(input_value, date): - return date - elif isinstance(input_value, str): - return str - return type(input_value) - - -def check_allowed_values(allowed_values, input_variable_path, input_values): - """Raises an exception if the input_values are not allowed - - Args: - allowed_values (dict): the allowed_values dict - input_variable_path (tuple): the path to the input variable - input_values (list/str/int/float/date/datetime): the values that we - are checking to see if they are in allowed_values - """ - these_allowed_values = list(allowed_values[input_variable_path].values()) - if (isinstance(input_values, list) - and not set(input_values).issubset( - set(these_allowed_values))): - invalid_values = ", ".join( - map(str, set(input_values) - set(these_allowed_values))), - raise ApiValueError( - "Invalid values for `%s` [%s], must 
be a subset of [%s]" % - ( - input_variable_path[0], - invalid_values, - ", ".join(map(str, these_allowed_values)) - ) - ) - elif (isinstance(input_values, dict) - and not set( - input_values.keys()).issubset(set(these_allowed_values))): - invalid_values = ", ".join( - map(str, set(input_values.keys()) - set(these_allowed_values))) - raise ApiValueError( - "Invalid keys in `%s` [%s], must be a subset of [%s]" % - ( - input_variable_path[0], - invalid_values, - ", ".join(map(str, these_allowed_values)) - ) - ) - elif (not isinstance(input_values, (list, dict)) - and input_values not in these_allowed_values): - raise ApiValueError( - "Invalid value for `%s` (%s), must be one of %s" % - ( - input_variable_path[0], - input_values, - these_allowed_values - ) - ) - - -def is_json_validation_enabled(schema_keyword, configuration=None): - """Returns true if JSON schema validation is enabled for the specified - validation keyword. This can be used to skip JSON schema structural validation - as requested in the configuration. - - Args: - schema_keyword (string): the name of a JSON schema validation keyword. - configuration (Configuration): the configuration class. - """ - - return (configuration is None or - not hasattr(configuration, '_disabled_client_side_validations') or - schema_keyword not in configuration._disabled_client_side_validations) - - -def check_validations( - validations, input_variable_path, input_values, - configuration=None): - """Raises an exception if the input_values are invalid - - Args: - validations (dict): the validation dictionary. - input_variable_path (tuple): the path to the input variable. - input_values (list/str/int/float/date/datetime): the values that we - are checking. - configuration (Configuration): the configuration class. 
- """ - - if input_values is None: - return - - current_validations = validations[input_variable_path] - if (is_json_validation_enabled('multipleOf', configuration) and - 'multiple_of' in current_validations and - isinstance(input_values, (int, float)) and - not (float(input_values) / current_validations['multiple_of']).is_integer()): - # Note 'multipleOf' will be as good as the floating point arithmetic. - raise ApiValueError( - "Invalid value for `%s`, value must be a multiple of " - "`%s`" % ( - input_variable_path[0], - current_validations['multiple_of'] - ) - ) - - if (is_json_validation_enabled('maxLength', configuration) and - 'max_length' in current_validations and - len(input_values) > current_validations['max_length']): - raise ApiValueError( - "Invalid value for `%s`, length must be less than or equal to " - "`%s`" % ( - input_variable_path[0], - current_validations['max_length'] - ) - ) - - if (is_json_validation_enabled('minLength', configuration) and - 'min_length' in current_validations and - len(input_values) < current_validations['min_length']): - raise ApiValueError( - "Invalid value for `%s`, length must be greater than or equal to " - "`%s`" % ( - input_variable_path[0], - current_validations['min_length'] - ) - ) - - if (is_json_validation_enabled('maxItems', configuration) and - 'max_items' in current_validations and - len(input_values) > current_validations['max_items']): - raise ApiValueError( - "Invalid value for `%s`, number of items must be less than or " - "equal to `%s`" % ( - input_variable_path[0], - current_validations['max_items'] - ) - ) - - if (is_json_validation_enabled('minItems', configuration) and - 'min_items' in current_validations and - len(input_values) < current_validations['min_items']): - raise ValueError( - "Invalid value for `%s`, number of items must be greater than or " - "equal to `%s`" % ( - input_variable_path[0], - current_validations['min_items'] - ) - ) - - items = ('exclusive_maximum', 'inclusive_maximum', 
'exclusive_minimum', - 'inclusive_minimum') - if (any(item in current_validations for item in items)): - if isinstance(input_values, list): - max_val = max(input_values) - min_val = min(input_values) - elif isinstance(input_values, dict): - max_val = max(input_values.values()) - min_val = min(input_values.values()) - else: - max_val = input_values - min_val = input_values - - if (is_json_validation_enabled('exclusiveMaximum', configuration) and - 'exclusive_maximum' in current_validations and - max_val >= current_validations['exclusive_maximum']): - raise ApiValueError( - "Invalid value for `%s`, must be a value less than `%s`" % ( - input_variable_path[0], - current_validations['exclusive_maximum'] - ) - ) - - if (is_json_validation_enabled('maximum', configuration) and - 'inclusive_maximum' in current_validations and - max_val > current_validations['inclusive_maximum']): - raise ApiValueError( - "Invalid value for `%s`, must be a value less than or equal to " - "`%s`" % ( - input_variable_path[0], - current_validations['inclusive_maximum'] - ) - ) - - if (is_json_validation_enabled('exclusiveMinimum', configuration) and - 'exclusive_minimum' in current_validations and - min_val <= current_validations['exclusive_minimum']): - raise ApiValueError( - "Invalid value for `%s`, must be a value greater than `%s`" % - ( - input_variable_path[0], - current_validations['exclusive_maximum'] - ) - ) - - if (is_json_validation_enabled('minimum', configuration) and - 'inclusive_minimum' in current_validations and - min_val < current_validations['inclusive_minimum']): - raise ApiValueError( - "Invalid value for `%s`, must be a value greater than or equal " - "to `%s`" % ( - input_variable_path[0], - current_validations['inclusive_minimum'] - ) - ) - flags = current_validations.get('regex', {}).get('flags', 0) - if (is_json_validation_enabled('pattern', configuration) and - 'regex' in current_validations and - not re.search(current_validations['regex']['pattern'], - 
input_values, flags=flags)): - err_msg = r"Invalid value for `%s`, must match regular expression `%s`" % ( - input_variable_path[0], - current_validations['regex']['pattern'] - ) - if flags != 0: - # Don't print the regex flags if the flags are not - # specified in the OAS document. - err_msg = r"%s with flags=`%s`" % (err_msg, flags) - raise ApiValueError(err_msg) - - -def order_response_types(required_types): - """Returns the required types sorted in coercion order - - Args: - required_types (list/tuple): collection of classes or instance of - list or dict with class information inside it. - - Returns: - (list): coercion order sorted collection of classes or instance - of list or dict with class information inside it. - """ - - def index_getter(class_or_instance): - if isinstance(class_or_instance, list): - return COERCION_INDEX_BY_TYPE[list] - elif isinstance(class_or_instance, dict): - return COERCION_INDEX_BY_TYPE[dict] - elif (inspect.isclass(class_or_instance) - and issubclass(class_or_instance, ModelComposed)): - return COERCION_INDEX_BY_TYPE[ModelComposed] - elif (inspect.isclass(class_or_instance) - and issubclass(class_or_instance, ModelNormal)): - return COERCION_INDEX_BY_TYPE[ModelNormal] - elif (inspect.isclass(class_or_instance) - and issubclass(class_or_instance, ModelSimple)): - return COERCION_INDEX_BY_TYPE[ModelSimple] - elif class_or_instance in COERCION_INDEX_BY_TYPE: - return COERCION_INDEX_BY_TYPE[class_or_instance] - raise ApiValueError("Unsupported type: %s" % class_or_instance) - - sorted_types = sorted( - required_types, - key=lambda class_or_instance: index_getter(class_or_instance) - ) - return sorted_types - - -def remove_uncoercible(required_types_classes, current_item, spec_property_naming, - must_convert=True): - """Only keeps the type conversions that are possible - - Args: - required_types_classes (tuple): tuple of classes that are required - these should be ordered by COERCION_INDEX_BY_TYPE - spec_property_naming (bool): True if 
the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - current_item (any): the current item (input data) to be converted - - Keyword Args: - must_convert (bool): if True the item to convert is of the wrong - type and we want a big list of coercibles - if False, we want a limited list of coercibles - - Returns: - (list): the remaining coercible required types, classes only - """ - current_type_simple = get_simple_class(current_item) - - results_classes = [] - for required_type_class in required_types_classes: - # convert our models to OpenApiModel - required_type_class_simplified = required_type_class - if isinstance(required_type_class_simplified, type): - if issubclass(required_type_class_simplified, ModelComposed): - required_type_class_simplified = ModelComposed - elif issubclass(required_type_class_simplified, ModelNormal): - required_type_class_simplified = ModelNormal - elif issubclass(required_type_class_simplified, ModelSimple): - required_type_class_simplified = ModelSimple - - if required_type_class_simplified == current_type_simple: - # don't consider converting to one's own class - continue - - class_pair = (current_type_simple, required_type_class_simplified) - if must_convert and class_pair in COERCIBLE_TYPE_PAIRS[spec_property_naming]: - results_classes.append(required_type_class) - elif class_pair in UPCONVERSION_TYPE_PAIRS: - results_classes.append(required_type_class) - return results_classes - -def get_discriminated_classes(cls): - """ - Returns all the classes that a discriminator converts to - TODO: lru_cache this - """ - possible_classes = [] - key = list(cls.discriminator.keys())[0] - if is_type_nullable(cls): - possible_classes.append(cls) - for discr_cls in cls.discriminator[key].values(): - if hasattr(discr_cls, 'discriminator') and discr_cls.discriminator is not None: - 
possible_classes.extend(get_discriminated_classes(discr_cls)) - else: - possible_classes.append(discr_cls) - return possible_classes - - -def get_possible_classes(cls, from_server_context): - # TODO: lru_cache this - possible_classes = [cls] - if from_server_context: - return possible_classes - if hasattr(cls, 'discriminator') and cls.discriminator is not None: - possible_classes = [] - possible_classes.extend(get_discriminated_classes(cls)) - elif issubclass(cls, ModelComposed): - possible_classes.extend(composed_model_input_classes(cls)) - return possible_classes - - -def get_required_type_classes(required_types_mixed, spec_property_naming): - """Converts the tuple required_types into a tuple and a dict described - below - - Args: - required_types_mixed (tuple/list): will contain either classes or - instance of list or dict - spec_property_naming (bool): if True these values came from the - server, and we use the data types in our endpoints. - If False, we are client side and we need to include - oneOf and discriminator classes inside the data types in our endpoints - - Returns: - (valid_classes, dict_valid_class_to_child_types_mixed): - valid_classes (tuple): the valid classes that the current item - should be - dict_valid_class_to_child_types_mixed (dict): - valid_class (class): this is the key - child_types_mixed (list/dict/tuple): describes the valid child - types - """ - valid_classes = [] - child_req_types_by_current_type = {} - for required_type in required_types_mixed: - if isinstance(required_type, list): - valid_classes.append(list) - child_req_types_by_current_type[list] = required_type - elif isinstance(required_type, tuple): - valid_classes.append(tuple) - child_req_types_by_current_type[tuple] = required_type - elif isinstance(required_type, dict): - valid_classes.append(dict) - child_req_types_by_current_type[dict] = required_type[str] - else: - valid_classes.extend(get_possible_classes(required_type, spec_property_naming)) - return 
tuple(valid_classes), child_req_types_by_current_type - - -def change_keys_js_to_python(input_dict, model_class): - """ - Converts from javascript_key keys in the input_dict to python_keys in - the output dict using the mapping in model_class. - If the input_dict contains a key which does not declared in the model_class, - the key is added to the output dict as is. The assumption is the model_class - may have undeclared properties (additionalProperties attribute in the OAS - document). - """ - - if getattr(model_class, 'attribute_map', None) is None: - return input_dict - output_dict = {} - reversed_attr_map = {value: key for key, value in - model_class.attribute_map.items()} - for javascript_key, value in input_dict.items(): - python_key = reversed_attr_map.get(javascript_key) - if python_key is None: - # if the key is unknown, it is in error or it is an - # additionalProperties variable - python_key = javascript_key - output_dict[python_key] = value - return output_dict - - -def get_type_error(var_value, path_to_item, valid_classes, key_type=False): - error_msg = type_error_message( - var_name=path_to_item[-1], - var_value=var_value, - valid_classes=valid_classes, - key_type=key_type - ) - return ApiTypeError( - error_msg, - path_to_item=path_to_item, - valid_classes=valid_classes, - key_type=key_type - ) - - -def deserialize_primitive(data, klass, path_to_item): - """Deserializes string to primitive type. - - :param data: str/int/float - :param klass: str/class the class to convert to - - :return: int, float, str, bool, date, datetime - """ - additional_message = "" - try: - if klass in {datetime, date}: - additional_message = ( - "If you need your parameter to have a fallback " - "string value, please set its type as `type: {}` in your " - "spec. That allows the value to be any type. " - ) - if klass == datetime: - if len(data) < 8: - raise ValueError("This is not a datetime") - # The string should be in iso8601 datetime format. 
- parsed_datetime = parse(data) - date_only = ( - parsed_datetime.hour == 0 and - parsed_datetime.minute == 0 and - parsed_datetime.second == 0 and - parsed_datetime.tzinfo is None and - 8 <= len(data) <= 10 - ) - if date_only: - raise ValueError("This is a date, not a datetime") - return parsed_datetime - elif klass == date: - if len(data) < 8: - raise ValueError("This is not a date") - return parse(data).date() - else: - converted_value = klass(data) - if isinstance(data, str) and klass == float: - if str(converted_value) != data: - # '7' -> 7.0 -> '7.0' != '7' - raise ValueError('This is not a float') - return converted_value - except (OverflowError, ValueError) as ex: - # parse can raise OverflowError - raise ApiValueError( - "{0}Failed to parse {1} as {2}".format( - additional_message, repr(data), klass.__name__ - ), - path_to_item=path_to_item - ) from ex - - -def get_discriminator_class(model_class, - discr_name, - discr_value, cls_visited): - """Returns the child class specified by the discriminator. - - Args: - model_class (OpenApiModel): the model class. - discr_name (string): the name of the discriminator property. - discr_value (any): the discriminator value. - cls_visited (list): list of model classes that have been visited. - Used to determine the discriminator class without - visiting circular references indefinitely. - - Returns: - used_model_class (class/None): the chosen child class that will be used - to deserialize the data, for example dog.Dog. - If a class is not found, None is returned. - """ - - if model_class in cls_visited: - # The class has already been visited and no suitable class was found. 
- return None - cls_visited.append(model_class) - used_model_class = None - if discr_name in model_class.discriminator: - class_name_to_discr_class = model_class.discriminator[discr_name] - used_model_class = class_name_to_discr_class.get(discr_value) - if used_model_class is None: - # We didn't find a discriminated class in class_name_to_discr_class. - # So look in the ancestor or descendant discriminators - # The discriminator mapping may exist in a descendant (anyOf, oneOf) - # or ancestor (allOf). - # Ancestor example: in the GrandparentAnimal -> ParentPet -> ChildCat - # hierarchy, the discriminator mappings may be defined at any level - # in the hierarchy. - # Descendant example: mammal -> whale/zebra/Pig -> BasquePig/DanishPig - # if we try to make BasquePig from mammal, we need to travel through - # the oneOf descendant discriminators to find BasquePig - descendant_classes = model_class._composed_schemas.get('oneOf', ()) + \ - model_class._composed_schemas.get('anyOf', ()) - ancestor_classes = model_class._composed_schemas.get('allOf', ()) - possible_classes = descendant_classes + ancestor_classes - for cls in possible_classes: - # Check if the schema has inherited discriminators. - if hasattr(cls, 'discriminator') and cls.discriminator is not None: - used_model_class = get_discriminator_class( - cls, discr_name, discr_value, cls_visited) - if used_model_class is not None: - return used_model_class - return used_model_class - - -def deserialize_model(model_data, model_class, path_to_item, check_type, - configuration, spec_property_naming): - """Deserializes model_data to model instance. 
- - Args: - model_data (int/str/float/bool/none_type/list/dict): data to instantiate the model - model_class (OpenApiModel): the model class - path_to_item (list): path to the model in the received data - check_type (bool): whether to check the data tupe for the values in - the model - configuration (Configuration): the instance to use to convert files - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - - Returns: - model instance - - Raise: - ApiTypeError - ApiValueError - ApiKeyError - """ - - kw_args = dict(_check_type=check_type, - _path_to_item=path_to_item, - _configuration=configuration, - _spec_property_naming=spec_property_naming) - - if issubclass(model_class, ModelSimple): - return model_class._new_from_openapi_data(model_data, **kw_args) - elif isinstance(model_data, list): - return model_class._new_from_openapi_data(*model_data, **kw_args) - if isinstance(model_data, dict): - kw_args.update(model_data) - return model_class._new_from_openapi_data(**kw_args) - elif isinstance(model_data, PRIMITIVE_TYPES): - return model_class._new_from_openapi_data(model_data, **kw_args) - - -def deserialize_file(response_data, configuration, content_disposition=None): - """Deserializes body to file - - Saves response body into a file in a temporary folder, - using the filename from the `Content-Disposition` header if provided. 
- - Args: - param response_data (str): the file data to write - configuration (Configuration): the instance to use to convert files - - Keyword Args: - content_disposition (str): the value of the Content-Disposition - header - - Returns: - (file_type): the deserialized file which is open - The user is responsible for closing and reading the file - """ - fd, path = tempfile.mkstemp(dir=configuration.temp_folder_path) - os.close(fd) - os.remove(path) - - if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', - content_disposition).group(1) - path = os.path.join(os.path.dirname(path), filename) - - with open(path, "wb") as f: - if isinstance(response_data, str): - # change str to bytes so we can write it - response_data = response_data.encode('utf-8') - f.write(response_data) - - f = open(path, "rb") - return f - - -def attempt_convert_item(input_value, valid_classes, path_to_item, - configuration, spec_property_naming, key_type=False, - must_convert=False, check_type=True): - """ - Args: - input_value (any): the data to convert - valid_classes (any): the classes that are valid - path_to_item (list): the path to the item to convert - configuration (Configuration): the instance to use to convert files - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. 
- key_type (bool): if True we need to convert a key type (not supported) - must_convert (bool): if True we must convert - check_type (bool): if True we check the type or the returned data in - ModelComposed/ModelNormal/ModelSimple instances - - Returns: - instance (any) the fixed item - - Raises: - ApiTypeError - ApiValueError - ApiKeyError - """ - valid_classes_ordered = order_response_types(valid_classes) - valid_classes_coercible = remove_uncoercible( - valid_classes_ordered, input_value, spec_property_naming) - if not valid_classes_coercible or key_type: - # we do not handle keytype errors, json will take care - # of this for us - if configuration is None or not configuration.discard_unknown_keys: - raise get_type_error(input_value, path_to_item, valid_classes, - key_type=key_type) - for valid_class in valid_classes_coercible: - try: - if issubclass(valid_class, OpenApiModel): - return deserialize_model(input_value, valid_class, - path_to_item, check_type, - configuration, spec_property_naming) - elif valid_class == file_type: - return deserialize_file(input_value, configuration) - return deserialize_primitive(input_value, valid_class, - path_to_item) - except (ApiTypeError, ApiValueError, ApiKeyError) as conversion_exc: - if must_convert: - raise conversion_exc - # if we have conversion errors when must_convert == False - # we ignore the exception and move on to the next class - continue - # we were unable to convert, must_convert == False - return input_value - - -def is_type_nullable(input_type): - """ - Returns true if None is an allowed value for the specified input_type. - - A type is nullable if at least one of the following conditions is true: - 1. The OAS 'nullable' attribute has been specified, - 1. The type is the 'null' type, - 1. The type is a anyOf/oneOf composed schema, and a child schema is - the 'null' type. 
- Args: - input_type (type): the class of the input_value that we are - checking - Returns: - bool - """ - if input_type is none_type: - return True - if issubclass(input_type, OpenApiModel) and input_type._nullable: - return True - if issubclass(input_type, ModelComposed): - # If oneOf/anyOf, check if the 'null' type is one of the allowed types. - for t in input_type._composed_schemas.get('oneOf', ()): - if is_type_nullable(t): return True - for t in input_type._composed_schemas.get('anyOf', ()): - if is_type_nullable(t): return True - return False - - -def is_valid_type(input_class_simple, valid_classes): - """ - Args: - input_class_simple (class): the class of the input_value that we are - checking - valid_classes (tuple): the valid classes that the current item - should be - Returns: - bool - """ - if issubclass(input_class_simple, OpenApiModel) and \ - valid_classes == (bool, date, datetime, dict, float, int, list, str, none_type,): - return True - valid_type = input_class_simple in valid_classes - if not valid_type and ( - issubclass(input_class_simple, OpenApiModel) or - input_class_simple is none_type): - for valid_class in valid_classes: - if input_class_simple is none_type and is_type_nullable(valid_class): - # Schema is oneOf/anyOf and the 'null' type is one of the allowed types. 
- return True - if not (issubclass(valid_class, OpenApiModel) and valid_class.discriminator): - continue - discr_propertyname_py = list(valid_class.discriminator.keys())[0] - discriminator_classes = ( - valid_class.discriminator[discr_propertyname_py].values() - ) - valid_type = is_valid_type(input_class_simple, discriminator_classes) - if valid_type: - return True - return valid_type - - -def validate_and_convert_types(input_value, required_types_mixed, path_to_item, - spec_property_naming, _check_type, configuration=None): - """Raises a TypeError is there is a problem, otherwise returns value - - Args: - input_value (any): the data to validate/convert - required_types_mixed (list/dict/tuple): A list of - valid classes, or a list tuples of valid classes, or a dict where - the value is a tuple of value classes - path_to_item: (list) the path to the data being validated - this stores a list of keys or indices to get to the data being - validated - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - _check_type: (boolean) if true, type will be checked and conversion - will be attempted. - configuration: (Configuration): the configuration class to use - when converting file_type items. 
- If passed, conversion will be attempted when possible - If not passed, no conversions will be attempted and - exceptions will be raised - - Returns: - the correctly typed value - - Raises: - ApiTypeError - """ - results = get_required_type_classes(required_types_mixed, spec_property_naming) - valid_classes, child_req_types_by_current_type = results - - input_class_simple = get_simple_class(input_value) - valid_type = is_valid_type(input_class_simple, valid_classes) - if not valid_type: - if configuration: - # if input_value is not valid_type try to convert it - converted_instance = attempt_convert_item( - input_value, - valid_classes, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=True, - check_type=_check_type - ) - return converted_instance - else: - raise get_type_error(input_value, path_to_item, valid_classes, - key_type=False) - - # input_value's type is in valid_classes - if len(valid_classes) > 1 and configuration: - # there are valid classes which are not the current class - valid_classes_coercible = remove_uncoercible( - valid_classes, input_value, spec_property_naming, must_convert=False) - if valid_classes_coercible: - converted_instance = attempt_convert_item( - input_value, - valid_classes_coercible, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=False, - check_type=_check_type - ) - return converted_instance - - if child_req_types_by_current_type == {}: - # all types are of the required types and there are no more inner - # variables left to look at - return input_value - inner_required_types = child_req_types_by_current_type.get( - type(input_value) - ) - if inner_required_types is None: - # for this type, there are not more inner variables left to look at - return input_value - if isinstance(input_value, list): - if input_value == []: - # allow an empty list - return input_value - for index, inner_value in enumerate(input_value): - inner_path = list(path_to_item) 
- inner_path.append(index) - input_value[index] = validate_and_convert_types( - inner_value, - inner_required_types, - inner_path, - spec_property_naming, - _check_type, - configuration=configuration - ) - elif isinstance(input_value, dict): - if input_value == {}: - # allow an empty dict - return input_value - for inner_key, inner_val in input_value.items(): - inner_path = list(path_to_item) - inner_path.append(inner_key) - if get_simple_class(inner_key) != str: - raise get_type_error(inner_key, inner_path, valid_classes, - key_type=True) - input_value[inner_key] = validate_and_convert_types( - inner_val, - inner_required_types, - inner_path, - spec_property_naming, - _check_type, - configuration=configuration - ) - return input_value - - -def model_to_dict(model_instance, serialize=True): - """Returns the model properties as a dict - - Args: - model_instance (one of your model instances): the model instance that - will be converted to a dict. - - Keyword Args: - serialize (bool): if True, the keys in the dict will be values from - attribute_map - """ - result = {} - extract_item = lambda item: (item[0], model_to_dict(item[1], serialize=serialize)) if hasattr(item[1], '_data_store') else item - - model_instances = [model_instance] - if model_instance._composed_schemas: - model_instances.extend(model_instance._composed_instances) - seen_json_attribute_names = set() - used_fallback_python_attribute_names = set() - py_to_json_map = {} - for model_instance in model_instances: - for attr, value in model_instance._data_store.items(): - if serialize: - # we use get here because additional property key names do not - # exist in attribute_map - try: - attr = model_instance.attribute_map[attr] - py_to_json_map.update(model_instance.attribute_map) - seen_json_attribute_names.add(attr) - except KeyError: - used_fallback_python_attribute_names.add(attr) - if isinstance(value, list): - if not value: - # empty list or None - result[attr] = value - else: - res = [] - for v in 
value: - if isinstance(v, PRIMITIVE_TYPES) or v is None: - res.append(v) - elif isinstance(v, ModelSimple): - res.append(v.value) - elif isinstance(v, dict): - res.append(dict(map( - extract_item, - v.items() - ))) - else: - res.append(model_to_dict(v, serialize=serialize)) - result[attr] = res - elif isinstance(value, dict): - result[attr] = dict(map( - extract_item, - value.items() - )) - elif isinstance(value, ModelSimple): - result[attr] = value.value - elif hasattr(value, '_data_store'): - result[attr] = model_to_dict(value, serialize=serialize) - else: - result[attr] = value - if serialize: - for python_key in used_fallback_python_attribute_names: - json_key = py_to_json_map.get(python_key) - if json_key is None: - continue - if python_key == json_key: - continue - json_key_assigned_no_need_for_python_key = json_key in seen_json_attribute_names - if json_key_assigned_no_need_for_python_key: - del result[python_key] - - return result - - -def type_error_message(var_value=None, var_name=None, valid_classes=None, - key_type=None): - """ - Keyword Args: - var_value (any): the variable which has the type_error - var_name (str): the name of the variable which has the typ error - valid_classes (tuple): the accepted classes for current_item's - value - key_type (bool): False if our value is a value in a dict - True if it is a key in a dict - False if our item is an item in a list - """ - key_or_value = 'value' - if key_type: - key_or_value = 'key' - valid_classes_phrase = get_valid_classes_phrase(valid_classes) - msg = ( - "Invalid type for variable '{0}'. 
Required {1} type {2} and " - "passed type was {3}".format( - var_name, - key_or_value, - valid_classes_phrase, - type(var_value).__name__, - ) - ) - return msg - - -def get_valid_classes_phrase(input_classes): - """Returns a string phrase describing what types are allowed - """ - all_classes = list(input_classes) - all_classes = sorted(all_classes, key=lambda cls: cls.__name__) - all_class_names = [cls.__name__ for cls in all_classes] - if len(all_class_names) == 1: - return 'is {0}'.format(all_class_names[0]) - return "is one of [{0}]".format(", ".join(all_class_names)) - - -def get_allof_instances(self, model_args, constant_args): - """ - Args: - self: the class we are handling - model_args (dict): var_name to var_value - used to make instances - constant_args (dict): - metadata arguments: - _check_type - _path_to_item - _spec_property_naming - _configuration - _visited_composed_classes - - Returns - composed_instances (list) - """ - composed_instances = [] - for allof_class in self._composed_schemas['allOf']: - - try: - if constant_args.get('_spec_property_naming'): - allof_instance = allof_class._from_openapi_data(**model_args, **constant_args) - else: - allof_instance = allof_class(**model_args, **constant_args) - composed_instances.append(allof_instance) - except Exception as ex: - raise ApiValueError( - "Invalid inputs given to generate an instance of '%s'. The " - "input data was invalid for the allOf schema '%s' in the composed " - "schema '%s'. Error=%s" % ( - allof_class.__name__, - allof_class.__name__, - self.__class__.__name__, - str(ex) - ) - ) from ex - return composed_instances - - -def get_oneof_instance(cls, model_kwargs, constant_kwargs, model_arg=None): - """ - Find the oneOf schema that matches the input data (e.g. payload). - If exactly one schema matches the input data, an instance of that schema - is returned. - If zero or more than one schema match the input data, an exception is raised. 
- In OAS 3.x, the payload MUST, by validation, match exactly one of the - schemas described by oneOf. - - Args: - cls: the class we are handling - model_kwargs (dict): var_name to var_value - The input data, e.g. the payload that must match a oneOf schema - in the OpenAPI document. - constant_kwargs (dict): var_name to var_value - args that every model requires, including configuration, server - and path to item. - - Kwargs: - model_arg: (int, float, bool, str, date, datetime, ModelSimple, None): - the value to assign to a primitive class or ModelSimple class - Notes: - - this is only passed in when oneOf includes types which are not object - - None is used to suppress handling of model_arg, nullable models are handled in __new__ - - Returns - oneof_instance (instance) - """ - if len(cls._composed_schemas['oneOf']) == 0: - return None - - oneof_instances = [] - # Iterate over each oneOf schema and determine if the input data - # matches the oneOf schemas. - for oneof_class in cls._composed_schemas['oneOf']: - # The composed oneOf schema allows the 'null' type and the input data - # is the null value. This is a OAS >= 3.1 feature. - if oneof_class is none_type: - # skip none_types because we are deserializing dict data. 
- # none_type deserialization is handled in the __new__ method - continue - - single_value_input = allows_single_value_input(oneof_class) - - try: - if not single_value_input: - if constant_kwargs.get('_spec_property_naming'): - oneof_instance = oneof_class._from_openapi_data(**model_kwargs, **constant_kwargs) - else: - oneof_instance = oneof_class(**model_kwargs, **constant_kwargs) - else: - if issubclass(oneof_class, ModelSimple): - if constant_kwargs.get('_spec_property_naming'): - oneof_instance = oneof_class._from_openapi_data(model_arg, **constant_kwargs) - else: - oneof_instance = oneof_class(model_arg, **constant_kwargs) - elif oneof_class in PRIMITIVE_TYPES: - oneof_instance = validate_and_convert_types( - model_arg, - (oneof_class,), - constant_kwargs['_path_to_item'], - constant_kwargs['_spec_property_naming'], - constant_kwargs['_check_type'], - configuration=constant_kwargs['_configuration'] - ) - oneof_instances.append(oneof_instance) - except Exception: - pass - if len(oneof_instances) == 0: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. None " - "of the oneOf schemas matched the input data." % - cls.__name__ - ) - elif len(oneof_instances) > 1: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. Multiple " - "oneOf schemas matched the inputs, but a max of one is allowed." % - cls.__name__ - ) - return oneof_instances[0] - - -def get_anyof_instances(self, model_args, constant_args): - """ - Args: - self: the class we are handling - model_args (dict): var_name to var_value - The input data, e.g. the payload that must match at least one - anyOf child schema in the OpenAPI document. - constant_args (dict): var_name to var_value - args that every model requires, including configuration, server - and path to item. 
- - Returns - anyof_instances (list) - """ - anyof_instances = [] - if len(self._composed_schemas['anyOf']) == 0: - return anyof_instances - - for anyof_class in self._composed_schemas['anyOf']: - # The composed oneOf schema allows the 'null' type and the input data - # is the null value. This is a OAS >= 3.1 feature. - if anyof_class is none_type: - # skip none_types because we are deserializing dict data. - # none_type deserialization is handled in the __new__ method - continue - - try: - if constant_args.get('_spec_property_naming'): - anyof_instance = anyof_class._from_openapi_data(**model_args, **constant_args) - else: - anyof_instance = anyof_class(**model_args, **constant_args) - anyof_instances.append(anyof_instance) - except Exception: - pass - if len(anyof_instances) == 0: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. None of the " - "anyOf schemas matched the inputs." % - self.__class__.__name__ - ) - return anyof_instances - - -def get_discarded_args(self, composed_instances, model_args): - """ - Gathers the args that were discarded by configuration.discard_unknown_keys - """ - model_arg_keys = model_args.keys() - discarded_args = set() - # arguments passed to self were already converted to python names - # before __init__ was called - for instance in composed_instances: - if instance.__class__ in self._composed_schemas['allOf']: - try: - keys = instance.to_dict().keys() - discarded_keys = model_args - keys - discarded_args.update(discarded_keys) - except Exception: - # allOf integer schema will throw exception - pass - else: - try: - all_keys = set(model_to_dict(instance, serialize=False).keys()) - js_keys = model_to_dict(instance, serialize=True).keys() - all_keys.update(js_keys) - discarded_keys = model_arg_keys - all_keys - discarded_args.update(discarded_keys) - except Exception: - # allOf integer schema will throw exception - pass - return discarded_args - - -def validate_get_composed_info(constant_args, model_args, 
self): - """ - For composed schemas, generate schema instances for - all schemas in the oneOf/anyOf/allOf definition. If additional - properties are allowed, also assign those properties on - all matched schemas that contain additionalProperties. - Openapi schemas are python classes. - - Exceptions are raised if: - - 0 or > 1 oneOf schema matches the model_args input data - - no anyOf schema matches the model_args input data - - any of the allOf schemas do not match the model_args input data - - Args: - constant_args (dict): these are the args that every model requires - model_args (dict): these are the required and optional spec args that - were passed in to make this model - self (class): the class that we are instantiating - This class contains self._composed_schemas - - Returns: - composed_info (list): length three - composed_instances (list): the composed instances which are not - self - var_name_to_model_instances (dict): a dict going from var_name - to the model_instance which holds that var_name - the model_instance may be self or an instance of one of the - classes in self.composed_instances() - additional_properties_model_instances (list): a list of the - model instances which have the property - additional_properties_type. 
This list can include self - """ - # create composed_instances - composed_instances = [] - allof_instances = get_allof_instances(self, model_args, constant_args) - composed_instances.extend(allof_instances) - oneof_instance = get_oneof_instance(self.__class__, model_args, constant_args) - if oneof_instance is not None: - composed_instances.append(oneof_instance) - anyof_instances = get_anyof_instances(self, model_args, constant_args) - composed_instances.extend(anyof_instances) - """ - set additional_properties_model_instances - additional properties must be evaluated at the schema level - so self's additional properties are most important - If self is a composed schema with: - - no properties defined in self - - additionalProperties: False - Then for object payloads every property is an additional property - and they are not allowed, so only empty dict is allowed - - Properties must be set on all matching schemas - so when a property is assigned toa composed instance, it must be set on all - composed instances regardless of additionalProperties presence - keeping it to prevent breaking changes in v5.0.1 - TODO remove cls._additional_properties_model_instances in 6.0.0 - """ - additional_properties_model_instances = [] - if self.additional_properties_type is not None: - additional_properties_model_instances = [self] - - """ - no need to set properties on self in here, they will be set in __init__ - By here all composed schema oneOf/anyOf/allOf instances have their properties set using - model_args - """ - discarded_args = get_discarded_args(self, composed_instances, model_args) - - # map variable names to composed_instances - var_name_to_model_instances = {} - for prop_name in model_args: - if prop_name not in discarded_args: - var_name_to_model_instances[prop_name] = [self] + composed_instances - - return [ - composed_instances, - var_name_to_model_instances, - additional_properties_model_instances, - discarded_args - ] diff --git 
a/sdks/python/client/argo_workflows/models/__init__.py b/sdks/python/client/argo_workflows/models/__init__.py index 44ef0697df2c..adc852323098 100644 --- a/sdks/python/client/argo_workflows/models/__init__.py +++ b/sdks/python/client/argo_workflows/models/__init__.py @@ -1,405 +1,410 @@ +# coding: utf-8 + # flake8: noqa +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 -# import all models into this package -# if you have many models here with many references from one model to another this may -# raise a RecursionError -# to avoid this, import only the models that you directly need like: -# from from argo_workflows.model.pet import Pet -# or import this package, but before doing it, use: -# import sys -# sys.setrecursionlimit(n) -from argo_workflows.model.aws_elastic_block_store_volume_source import AWSElasticBlockStoreVolumeSource -from argo_workflows.model.affinity import Affinity -from argo_workflows.model.azure_disk_volume_source import AzureDiskVolumeSource -from argo_workflows.model.azure_file_volume_source import AzureFileVolumeSource -from argo_workflows.model.csi_volume_source import CSIVolumeSource -from argo_workflows.model.capabilities import Capabilities -from argo_workflows.model.ceph_fs_volume_source import CephFSVolumeSource -from argo_workflows.model.cinder_volume_source import CinderVolumeSource -from argo_workflows.model.config_map_env_source import ConfigMapEnvSource -from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector -from argo_workflows.model.config_map_projection import ConfigMapProjection -from argo_workflows.model.config_map_volume_source import ConfigMapVolumeSource -from 
argo_workflows.model.container import Container -from argo_workflows.model.container_port import ContainerPort -from argo_workflows.model.create_options import CreateOptions -from argo_workflows.model.downward_api_projection import DownwardAPIProjection -from argo_workflows.model.downward_api_volume_file import DownwardAPIVolumeFile -from argo_workflows.model.downward_api_volume_source import DownwardAPIVolumeSource -from argo_workflows.model.duration import Duration -from argo_workflows.model.empty_dir_volume_source import EmptyDirVolumeSource -from argo_workflows.model.env_from_source import EnvFromSource -from argo_workflows.model.env_var import EnvVar -from argo_workflows.model.env_var_source import EnvVarSource -from argo_workflows.model.ephemeral_volume_source import EphemeralVolumeSource -from argo_workflows.model.event import Event -from argo_workflows.model.event_series import EventSeries -from argo_workflows.model.event_source import EventSource -from argo_workflows.model.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest -from argo_workflows.model.eventsource_event_source_watch_event import EventsourceEventSourceWatchEvent -from argo_workflows.model.eventsource_log_entry import EventsourceLogEntry -from argo_workflows.model.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest -from argo_workflows.model.exec_action import ExecAction -from argo_workflows.model.fc_volume_source import FCVolumeSource -from argo_workflows.model.flex_volume_source import FlexVolumeSource -from argo_workflows.model.flocker_volume_source import FlockerVolumeSource -from argo_workflows.model.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource -from argo_workflows.model.grpc_action import GRPCAction -from argo_workflows.model.git_repo_volume_source import GitRepoVolumeSource -from argo_workflows.model.glusterfs_volume_source import GlusterfsVolumeSource -from argo_workflows.model.google_protobuf_any 
import GoogleProtobufAny -from argo_workflows.model.group_version_resource import GroupVersionResource -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError -from argo_workflows.model.http_get_action import HTTPGetAction -from argo_workflows.model.http_header import HTTPHeader -from argo_workflows.model.host_alias import HostAlias -from argo_workflows.model.host_path_volume_source import HostPathVolumeSource -from argo_workflows.model.iscsi_volume_source import ISCSIVolumeSource -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_consume_config import IoArgoprojEventsV1alpha1AMQPConsumeConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_bind_config import IoArgoprojEventsV1alpha1AMQPQueueBindConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount -from argo_workflows.model.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_events_hub_event_source import 
IoArgoprojEventsV1alpha1AzureEventsHubEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff -from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_event_source import IoArgoprojEventsV1alpha1BitbucketEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository -from argo_workflows.model.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_catchup_configuration import IoArgoprojEventsV1alpha1CatchupConfiguration -from argo_workflows.model.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition -from argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime -from 
argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria -from argo_workflows.model.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence -from argo_workflows.model.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_data_filter import IoArgoprojEventsV1alpha1DataFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_emitter_event_source import IoArgoprojEventsV1alpha1EmitterEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_event_context import IoArgoprojEventsV1alpha1EventContext -from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency -from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_filter import IoArgoprojEventsV1alpha1EventDependencyFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer -from argo_workflows.model.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus -from 
argo_workflows.model.io_argoproj_events_v1alpha1_expr_filter import IoArgoprojEventsV1alpha1ExprFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_file_artifact import IoArgoprojEventsV1alpha1FileArtifact -from argo_workflows.model.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_git_artifact import IoArgoprojEventsV1alpha1GitArtifact -from argo_workflows.model.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds -from argo_workflows.model.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds -from argo_workflows.model.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_http_trigger import IoArgoprojEventsV1alpha1HTTPTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString -from argo_workflows.model.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy -from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup -from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_event_source import 
IoArgoprojEventsV1alpha1KafkaEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata -from argo_workflows.model.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource -from argo_workflows.model.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories -from argo_workflows.model.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField -from argo_workflows.model.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit -from argo_workflows.model.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_redis_stream_event_source import 
IoArgoprojEventsV1alpha1RedisStreamEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_resource import IoArgoprojEventsV1alpha1Resource -from argo_workflows.model.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact -from argo_workflows.model.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket -from argo_workflows.model.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter -from argo_workflows.model.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader -from argo_workflows.model.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus -from 
argo_workflows.model.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_standard_k8_s_trigger import IoArgoprojEventsV1alpha1StandardK8STrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status -from argo_workflows.model.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy -from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_filter import IoArgoprojEventsV1alpha1StorageGridFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template -from argo_workflows.model.io_argoproj_events_v1alpha1_time_filter import IoArgoprojEventsV1alpha1TimeFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource -from 
argo_workflows.model.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate -from argo_workflows.model.io_argoproj_events_v1alpha1_url_artifact import IoArgoprojEventsV1alpha1URLArtifact -from argo_workflows.model.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource -from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext -from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource -from argo_workflows.model.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy -from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments -from argo_workflows.model.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc_spec import IoArgoprojWorkflowV1alpha1ArtifactGCSpec -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_gc_status import IoArgoprojWorkflowV1alpha1ArtifactGCStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_paths import 
IoArgoprojWorkflowV1alpha1ArtifactPaths -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactRepository -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifact_result_node_status import IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository -from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository -from argo_workflows.model.io_argoproj_workflow_v1alpha1_backoff import IoArgoprojWorkflowV1alpha1Backoff -from argo_workflows.model.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cache import IoArgoprojWorkflowV1alpha1Cache -from argo_workflows.model.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import 
IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_column import IoArgoprojWorkflowV1alpha1Column -from argo_workflows.model.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition -from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_node import IoArgoprojWorkflowV1alpha1ContainerNode -from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_set_retry_strategy import IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy -from argo_workflows.model.io_argoproj_workflow_v1alpha1_container_set_template import IoArgoprojWorkflowV1alpha1ContainerSetTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn -from argo_workflows.model.io_argoproj_workflow_v1alpha1_counter import IoArgoprojWorkflowV1alpha1Counter -from argo_workflows.model.io_argoproj_workflow_v1alpha1_create_cron_workflow_request import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_list import 
IoArgoprojWorkflowV1alpha1CronWorkflowList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_spec import IoArgoprojWorkflowV1alpha1CronWorkflowSpec -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_status import IoArgoprojWorkflowV1alpha1CronWorkflowStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_dag_task import IoArgoprojWorkflowV1alpha1DAGTask -from argo_workflows.model.io_argoproj_workflow_v1alpha1_dag_template import IoArgoprojWorkflowV1alpha1DAGTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_data import IoArgoprojWorkflowV1alpha1Data -from argo_workflows.model.io_argoproj_workflow_v1alpha1_data_source import IoArgoprojWorkflowV1alpha1DataSource -from argo_workflows.model.io_argoproj_workflow_v1alpha1_event import IoArgoprojWorkflowV1alpha1Event -from argo_workflows.model.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig -from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_gcs_artifact_repository import IoArgoprojWorkflowV1alpha1GCSArtifactRepository -from argo_workflows.model.io_argoproj_workflow_v1alpha1_gauge import IoArgoprojWorkflowV1alpha1Gauge -from argo_workflows.model.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse -from argo_workflows.model.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository -from argo_workflows.model.io_argoproj_workflow_v1alpha1_http import IoArgoprojWorkflowV1alpha1HTTP -from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth -from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource -from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_header import IoArgoprojWorkflowV1alpha1HTTPHeader -from argo_workflows.model.io_argoproj_workflow_v1alpha1_http_header_source import IoArgoprojWorkflowV1alpha1HTTPHeaderSource -from argo_workflows.model.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header -from argo_workflows.model.io_argoproj_workflow_v1alpha1_histogram import IoArgoprojWorkflowV1alpha1Histogram -from argo_workflows.model.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse -from argo_workflows.model.io_argoproj_workflow_v1alpha1_inputs import IoArgoprojWorkflowV1alpha1Inputs -from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys -from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_value_from import IoArgoprojWorkflowV1alpha1LabelValueFrom -from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues -from argo_workflows.model.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook -from argo_workflows.model.io_argoproj_workflow_v1alpha1_link import IoArgoprojWorkflowV1alpha1Link -from argo_workflows.model.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_log_entry import IoArgoprojWorkflowV1alpha1LogEntry -from argo_workflows.model.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom -from argo_workflows.model.io_argoproj_workflow_v1alpha1_memoization_status import IoArgoprojWorkflowV1alpha1MemoizationStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_memoize import IoArgoprojWorkflowV1alpha1Memoize -from argo_workflows.model.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata -from argo_workflows.model.io_argoproj_workflow_v1alpha1_metric_label import IoArgoprojWorkflowV1alpha1MetricLabel -from argo_workflows.model.io_argoproj_workflow_v1alpha1_metrics import IoArgoprojWorkflowV1alpha1Metrics -from argo_workflows.model.io_argoproj_workflow_v1alpha1_mutex import IoArgoprojWorkflowV1alpha1Mutex -from argo_workflows.model.io_argoproj_workflow_v1alpha1_mutex_holding import IoArgoprojWorkflowV1alpha1MutexHolding -from argo_workflows.model.io_argoproj_workflow_v1alpha1_mutex_status import IoArgoprojWorkflowV1alpha1MutexStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_flag import IoArgoprojWorkflowV1alpha1NodeFlag -from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_result import IoArgoprojWorkflowV1alpha1NodeResult -from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_node_synchronization_status import IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth -from argo_workflows.model.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam -from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository -from argo_workflows.model.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule -from argo_workflows.model.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs -from argo_workflows.model.io_argoproj_workflow_v1alpha1_parallel_steps import IoArgoprojWorkflowV1alpha1ParallelSteps -from argo_workflows.model.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter -from argo_workflows.model.io_argoproj_workflow_v1alpha1_pod_gc import IoArgoprojWorkflowV1alpha1PodGC -from argo_workflows.model.io_argoproj_workflow_v1alpha1_prometheus import IoArgoprojWorkflowV1alpha1Prometheus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_resource_template import IoArgoprojWorkflowV1alpha1ResourceTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_affinity import IoArgoprojWorkflowV1alpha1RetryAffinity -from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy -from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact -from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_artifact_repository import IoArgoprojWorkflowV1alpha1S3ArtifactRepository -from argo_workflows.model.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_script_template import IoArgoprojWorkflowV1alpha1ScriptTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_semaphore_holding import IoArgoprojWorkflowV1alpha1SemaphoreHolding -from argo_workflows.model.io_argoproj_workflow_v1alpha1_semaphore_ref import IoArgoprojWorkflowV1alpha1SemaphoreRef -from argo_workflows.model.io_argoproj_workflow_v1alpha1_semaphore_status import IoArgoprojWorkflowV1alpha1SemaphoreStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence -from argo_workflows.model.io_argoproj_workflow_v1alpha1_stop_strategy import IoArgoprojWorkflowV1alpha1StopStrategy -from argo_workflows.model.io_argoproj_workflow_v1alpha1_submit import IoArgoprojWorkflowV1alpha1Submit -from argo_workflows.model.io_argoproj_workflow_v1alpha1_submit_opts import IoArgoprojWorkflowV1alpha1SubmitOpts -from argo_workflows.model.io_argoproj_workflow_v1alpha1_suspend_template import IoArgoprojWorkflowV1alpha1SuspendTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization -from argo_workflows.model.io_argoproj_workflow_v1alpha1_synchronization_status import IoArgoprojWorkflowV1alpha1SynchronizationStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_ttl_strategy import IoArgoprojWorkflowV1alpha1TTLStrategy -from argo_workflows.model.io_argoproj_workflow_v1alpha1_tar_strategy import IoArgoprojWorkflowV1alpha1TarStrategy -from argo_workflows.model.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template -from argo_workflows.model.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef -from argo_workflows.model.io_argoproj_workflow_v1alpha1_transformation_step import IoArgoprojWorkflowV1alpha1TransformationStep -from argo_workflows.model.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import 
IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_user_container import IoArgoprojWorkflowV1alpha1UserContainer -from argo_workflows.model.io_argoproj_workflow_v1alpha1_value_from import IoArgoprojWorkflowV1alpha1ValueFrom -from argo_workflows.model.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version -from argo_workflows.model.io_argoproj_workflow_v1alpha1_volume_claim_gc import IoArgoprojWorkflowV1alpha1VolumeClaimGC -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_event_binding import IoArgoprojWorkflowV1alpha1WorkflowEventBinding -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_event_binding_spec import IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc import IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_lint_request import IoArgoprojWorkflowV1alpha1WorkflowLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_metadata import IoArgoprojWorkflowV1alpha1WorkflowMetadata -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_status import IoArgoprojWorkflowV1alpha1WorkflowStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_step import IoArgoprojWorkflowV1alpha1WorkflowStep -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_stop_request import IoArgoprojWorkflowV1alpha1WorkflowStopRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_task_set_spec import IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_task_set_status import IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_list import IoArgoprojWorkflowV1alpha1WorkflowTemplateList -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_watch_event import IoArgoprojWorkflowV1alpha1WorkflowWatchEvent -from argo_workflows.model.io_k8s_api_policy_v1_pod_disruption_budget_spec import IoK8sApiPolicyV1PodDisruptionBudgetSpec -from argo_workflows.model.key_to_path import KeyToPath -from argo_workflows.model.label_selector import LabelSelector -from argo_workflows.model.label_selector_requirement import LabelSelectorRequirement -from argo_workflows.model.lifecycle import Lifecycle -from argo_workflows.model.lifecycle_handler import LifecycleHandler -from argo_workflows.model.list_meta import ListMeta -from argo_workflows.model.local_object_reference import LocalObjectReference -from argo_workflows.model.managed_fields_entry import ManagedFieldsEntry -from argo_workflows.model.nfs_volume_source import NFSVolumeSource -from argo_workflows.model.node_affinity import NodeAffinity -from argo_workflows.model.node_selector import NodeSelector -from argo_workflows.model.node_selector_requirement import NodeSelectorRequirement -from argo_workflows.model.node_selector_term import NodeSelectorTerm -from argo_workflows.model.object_field_selector import ObjectFieldSelector -from argo_workflows.model.object_meta import ObjectMeta -from argo_workflows.model.object_reference import ObjectReference -from argo_workflows.model.owner_reference import OwnerReference -from argo_workflows.model.persistent_volume_claim import PersistentVolumeClaim -from argo_workflows.model.persistent_volume_claim_condition import PersistentVolumeClaimCondition -from argo_workflows.model.persistent_volume_claim_spec import PersistentVolumeClaimSpec -from 
argo_workflows.model.persistent_volume_claim_status import PersistentVolumeClaimStatus -from argo_workflows.model.persistent_volume_claim_template import PersistentVolumeClaimTemplate -from argo_workflows.model.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource -from argo_workflows.model.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource -from argo_workflows.model.pod_affinity import PodAffinity -from argo_workflows.model.pod_affinity_term import PodAffinityTerm -from argo_workflows.model.pod_anti_affinity import PodAntiAffinity -from argo_workflows.model.pod_dns_config import PodDNSConfig -from argo_workflows.model.pod_dns_config_option import PodDNSConfigOption -from argo_workflows.model.pod_security_context import PodSecurityContext -from argo_workflows.model.portworx_volume_source import PortworxVolumeSource -from argo_workflows.model.preferred_scheduling_term import PreferredSchedulingTerm -from argo_workflows.model.probe import Probe -from argo_workflows.model.projected_volume_source import ProjectedVolumeSource -from argo_workflows.model.quobyte_volume_source import QuobyteVolumeSource -from argo_workflows.model.rbd_volume_source import RBDVolumeSource -from argo_workflows.model.resource_field_selector import ResourceFieldSelector -from argo_workflows.model.resource_requirements import ResourceRequirements -from argo_workflows.model.se_linux_options import SELinuxOptions -from argo_workflows.model.scale_io_volume_source import ScaleIOVolumeSource -from argo_workflows.model.seccomp_profile import SeccompProfile -from argo_workflows.model.secret_env_source import SecretEnvSource -from argo_workflows.model.secret_key_selector import SecretKeySelector -from argo_workflows.model.secret_projection import SecretProjection -from argo_workflows.model.secret_volume_source import SecretVolumeSource -from argo_workflows.model.security_context import SecurityContext -from 
argo_workflows.model.sensor_create_sensor_request import SensorCreateSensorRequest -from argo_workflows.model.sensor_log_entry import SensorLogEntry -from argo_workflows.model.sensor_sensor_watch_event import SensorSensorWatchEvent -from argo_workflows.model.sensor_update_sensor_request import SensorUpdateSensorRequest -from argo_workflows.model.service_account_token_projection import ServiceAccountTokenProjection -from argo_workflows.model.service_port import ServicePort -from argo_workflows.model.status_cause import StatusCause -from argo_workflows.model.storage_os_volume_source import StorageOSVolumeSource -from argo_workflows.model.stream_result_of_event import StreamResultOfEvent -from argo_workflows.model.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent -from argo_workflows.model.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry -from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry -from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent -from argo_workflows.model.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry -from argo_workflows.model.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent -from argo_workflows.model.sysctl import Sysctl -from argo_workflows.model.tcp_socket_action import TCPSocketAction -from argo_workflows.model.toleration import Toleration -from argo_workflows.model.typed_local_object_reference import TypedLocalObjectReference -from argo_workflows.model.volume import Volume -from argo_workflows.model.volume_device import VolumeDevice -from argo_workflows.model.volume_mount import VolumeMount -from argo_workflows.model.volume_projection import VolumeProjection -from 
argo_workflows.model.vsphere_virtual_disk_volume_source import VsphereVirtualDiskVolumeSource -from argo_workflows.model.weighted_pod_affinity_term import WeightedPodAffinityTerm -from argo_workflows.model.windows_security_context_options import WindowsSecurityContextOptions +# import models into model package +from argo_workflows.models.aws_elastic_block_store_volume_source import AWSElasticBlockStoreVolumeSource +from argo_workflows.models.affinity import Affinity +from argo_workflows.models.azure_disk_volume_source import AzureDiskVolumeSource +from argo_workflows.models.azure_file_volume_source import AzureFileVolumeSource +from argo_workflows.models.csi_volume_source import CSIVolumeSource +from argo_workflows.models.capabilities import Capabilities +from argo_workflows.models.ceph_fs_volume_source import CephFSVolumeSource +from argo_workflows.models.cinder_volume_source import CinderVolumeSource +from argo_workflows.models.config_map_env_source import ConfigMapEnvSource +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from argo_workflows.models.config_map_projection import ConfigMapProjection +from argo_workflows.models.config_map_volume_source import ConfigMapVolumeSource +from argo_workflows.models.container import Container +from argo_workflows.models.container_port import ContainerPort +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.downward_api_projection import DownwardAPIProjection +from argo_workflows.models.downward_api_volume_file import DownwardAPIVolumeFile +from argo_workflows.models.downward_api_volume_source import DownwardAPIVolumeSource +from argo_workflows.models.duration import Duration +from argo_workflows.models.empty_dir_volume_source import EmptyDirVolumeSource +from argo_workflows.models.env_from_source import EnvFromSource +from argo_workflows.models.env_var import EnvVar +from argo_workflows.models.env_var_source import EnvVarSource +from 
argo_workflows.models.ephemeral_volume_source import EphemeralVolumeSource +from argo_workflows.models.event import Event +from argo_workflows.models.event_series import EventSeries +from argo_workflows.models.event_source import EventSource +from argo_workflows.models.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest +from argo_workflows.models.eventsource_event_source_watch_event import EventsourceEventSourceWatchEvent +from argo_workflows.models.eventsource_log_entry import EventsourceLogEntry +from argo_workflows.models.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest +from argo_workflows.models.exec_action import ExecAction +from argo_workflows.models.fc_volume_source import FCVolumeSource +from argo_workflows.models.flex_volume_source import FlexVolumeSource +from argo_workflows.models.flocker_volume_source import FlockerVolumeSource +from argo_workflows.models.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource +from argo_workflows.models.grpc_action import GRPCAction +from argo_workflows.models.git_repo_volume_source import GitRepoVolumeSource +from argo_workflows.models.glusterfs_volume_source import GlusterfsVolumeSource +from argo_workflows.models.google_protobuf_any import GoogleProtobufAny +from argo_workflows.models.group_version_resource import GroupVersionResource +from argo_workflows.models.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from argo_workflows.models.http_get_action import HTTPGetAction +from argo_workflows.models.http_header import HTTPHeader +from argo_workflows.models.host_alias import HostAlias +from argo_workflows.models.host_path_volume_source import HostPathVolumeSource +from argo_workflows.models.iscsi_volume_source import ISCSIVolumeSource +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_consume_config import 
IoArgoprojEventsV1alpha1AMQPConsumeConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_bind_config import IoArgoprojEventsV1alpha1AMQPQueueBindConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount +from argo_workflows.models.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_events_hub_event_source import IoArgoprojEventsV1alpha1AzureEventsHubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from 
argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_event_source import IoArgoprojEventsV1alpha1BitbucketEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository +from argo_workflows.models.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_catchup_configuration import IoArgoprojEventsV1alpha1CatchupConfiguration +from argo_workflows.models.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria +from argo_workflows.models.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence +from argo_workflows.models.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_data_filter import IoArgoprojEventsV1alpha1DataFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_emitter_event_source import 
IoArgoprojEventsV1alpha1EmitterEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_event_context import IoArgoprojEventsV1alpha1EventContext +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_filter import IoArgoprojEventsV1alpha1EventDependencyFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer +from argo_workflows.models.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus +from argo_workflows.models.io_argoproj_events_v1alpha1_expr_filter import IoArgoprojEventsV1alpha1ExprFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_file_artifact import IoArgoprojEventsV1alpha1FileArtifact +from argo_workflows.models.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_git_artifact import IoArgoprojEventsV1alpha1GitArtifact +from 
argo_workflows.models.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds +from argo_workflows.models.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds +from argo_workflows.models.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_http_trigger import IoArgoprojEventsV1alpha1HTTPTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString +from argo_workflows.models.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_event_source import IoArgoprojEventsV1alpha1KafkaEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource 
+from argo_workflows.models.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories +from argo_workflows.models.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField +from argo_workflows.models.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_stream_event_source import IoArgoprojEventsV1alpha1RedisStreamEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource import IoArgoprojEventsV1alpha1Resource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter 
+from argo_workflows.models.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader +from argo_workflows.models.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus +from argo_workflows.models.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_standard_k8_s_trigger import IoArgoprojEventsV1alpha1StandardK8STrigger +from 
argo_workflows.models.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status +from argo_workflows.models.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_filter import IoArgoprojEventsV1alpha1StorageGridFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template +from argo_workflows.models.io_argoproj_events_v1alpha1_time_filter import IoArgoprojEventsV1alpha1TimeFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate +from argo_workflows.models.io_argoproj_events_v1alpha1_url_artifact import IoArgoprojEventsV1alpha1URLArtifact +from argo_workflows.models.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource +from argo_workflows.models.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from 
argo_workflows.models.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.models.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc_spec import IoArgoprojWorkflowV1alpha1ArtifactGCSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc_status import IoArgoprojWorkflowV1alpha1ArtifactGCStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_paths import IoArgoprojWorkflowV1alpha1ArtifactPaths +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result_node_status import 
IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_backoff import IoArgoprojWorkflowV1alpha1Backoff +from argo_workflows.models.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cache import IoArgoprojWorkflowV1alpha1Cache +from argo_workflows.models.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_column import IoArgoprojWorkflowV1alpha1Column +from argo_workflows.models.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_node import IoArgoprojWorkflowV1alpha1ContainerNode +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_retry_strategy import IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_template import IoArgoprojWorkflowV1alpha1ContainerSetTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn +from argo_workflows.models.io_argoproj_workflow_v1alpha1_counter import IoArgoprojWorkflowV1alpha1Counter +from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_cron_workflow_request import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_list import IoArgoprojWorkflowV1alpha1CronWorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_spec import IoArgoprojWorkflowV1alpha1CronWorkflowSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_status import IoArgoprojWorkflowV1alpha1CronWorkflowStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_task import 
IoArgoprojWorkflowV1alpha1DAGTask +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_template import IoArgoprojWorkflowV1alpha1DAGTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data import IoArgoprojWorkflowV1alpha1Data +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data_source import IoArgoprojWorkflowV1alpha1DataSource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_event import IoArgoprojWorkflowV1alpha1Event +from argo_workflows.models.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact_repository import IoArgoprojWorkflowV1alpha1GCSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gauge import IoArgoprojWorkflowV1alpha1Gauge +from argo_workflows.models.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http import IoArgoprojWorkflowV1alpha1HTTP +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header import 
IoArgoprojWorkflowV1alpha1HTTPHeader +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header_source import IoArgoprojWorkflowV1alpha1HTTPHeaderSource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header +from argo_workflows.models.io_argoproj_workflow_v1alpha1_histogram import IoArgoprojWorkflowV1alpha1Histogram +from argo_workflows.models.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_inputs import IoArgoprojWorkflowV1alpha1Inputs +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_value_from import IoArgoprojWorkflowV1alpha1LabelValueFrom +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook +from argo_workflows.models.io_argoproj_workflow_v1alpha1_link import IoArgoprojWorkflowV1alpha1Link +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_log_entry import IoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.models.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom +from argo_workflows.models.io_argoproj_workflow_v1alpha1_memoization_status import IoArgoprojWorkflowV1alpha1MemoizationStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_memoize import IoArgoprojWorkflowV1alpha1Memoize +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metric_label import 
IoArgoprojWorkflowV1alpha1MetricLabel +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metrics import IoArgoprojWorkflowV1alpha1Metrics +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex import IoArgoprojWorkflowV1alpha1Mutex +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex_holding import IoArgoprojWorkflowV1alpha1MutexHolding +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex_status import IoArgoprojWorkflowV1alpha1MutexStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_flag import IoArgoprojWorkflowV1alpha1NodeFlag +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_result import IoArgoprojWorkflowV1alpha1NodeResult +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_synchronization_status import IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule +from argo_workflows.models.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parallel_steps import IoArgoprojWorkflowV1alpha1ParallelSteps +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter +from argo_workflows.models.io_argoproj_workflow_v1alpha1_pod_gc 
import IoArgoprojWorkflowV1alpha1PodGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_prometheus import IoArgoprojWorkflowV1alpha1Prometheus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resource_template import IoArgoprojWorkflowV1alpha1ResourceTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_affinity import IoArgoprojWorkflowV1alpha1RetryAffinity +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact_repository import IoArgoprojWorkflowV1alpha1S3ArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_script_template import IoArgoprojWorkflowV1alpha1ScriptTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_holding import IoArgoprojWorkflowV1alpha1SemaphoreHolding +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_ref import IoArgoprojWorkflowV1alpha1SemaphoreRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_status import IoArgoprojWorkflowV1alpha1SemaphoreStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence +from argo_workflows.models.io_argoproj_workflow_v1alpha1_stop_strategy 
import IoArgoprojWorkflowV1alpha1StopStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit import IoArgoprojWorkflowV1alpha1Submit +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit_opts import IoArgoprojWorkflowV1alpha1SubmitOpts +from argo_workflows.models.io_argoproj_workflow_v1alpha1_suspend_template import IoArgoprojWorkflowV1alpha1SuspendTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization_status import IoArgoprojWorkflowV1alpha1SynchronizationStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_ttl_strategy import IoArgoprojWorkflowV1alpha1TTLStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_tar_strategy import IoArgoprojWorkflowV1alpha1TarStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_transformation_step import IoArgoprojWorkflowV1alpha1TransformationStep +from argo_workflows.models.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_user_container import IoArgoprojWorkflowV1alpha1UserContainer +from argo_workflows.models.io_argoproj_workflow_v1alpha1_value_from import IoArgoprojWorkflowV1alpha1ValueFrom +from argo_workflows.models.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version +from argo_workflows.models.io_argoproj_workflow_v1alpha1_volume_claim_gc import IoArgoprojWorkflowV1alpha1VolumeClaimGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding import IoArgoprojWorkflowV1alpha1WorkflowEventBinding +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_spec import IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc import IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_lint_request import IoArgoprojWorkflowV1alpha1WorkflowLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_metadata import IoArgoprojWorkflowV1alpha1WorkflowMetadata +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_status import IoArgoprojWorkflowV1alpha1WorkflowStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_step import IoArgoprojWorkflowV1alpha1WorkflowStep +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_stop_request import IoArgoprojWorkflowV1alpha1WorkflowStopRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_task_set_spec import IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_task_set_status import IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_list import IoArgoprojWorkflowV1alpha1WorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_watch_event import IoArgoprojWorkflowV1alpha1WorkflowWatchEvent +from argo_workflows.models.io_k8s_api_policy_v1_pod_disruption_budget_spec import IoK8sApiPolicyV1PodDisruptionBudgetSpec +from argo_workflows.models.key_to_path import KeyToPath +from 
argo_workflows.models.label_selector import LabelSelector +from argo_workflows.models.label_selector_requirement import LabelSelectorRequirement +from argo_workflows.models.lifecycle import Lifecycle +from argo_workflows.models.lifecycle_handler import LifecycleHandler +from argo_workflows.models.list_meta import ListMeta +from argo_workflows.models.local_object_reference import LocalObjectReference +from argo_workflows.models.managed_fields_entry import ManagedFieldsEntry +from argo_workflows.models.nfs_volume_source import NFSVolumeSource +from argo_workflows.models.node_affinity import NodeAffinity +from argo_workflows.models.node_selector import NodeSelector +from argo_workflows.models.node_selector_requirement import NodeSelectorRequirement +from argo_workflows.models.node_selector_term import NodeSelectorTerm +from argo_workflows.models.object_field_selector import ObjectFieldSelector +from argo_workflows.models.object_meta import ObjectMeta +from argo_workflows.models.object_reference import ObjectReference +from argo_workflows.models.owner_reference import OwnerReference +from argo_workflows.models.persistent_volume_claim import PersistentVolumeClaim +from argo_workflows.models.persistent_volume_claim_condition import PersistentVolumeClaimCondition +from argo_workflows.models.persistent_volume_claim_spec import PersistentVolumeClaimSpec +from argo_workflows.models.persistent_volume_claim_status import PersistentVolumeClaimStatus +from argo_workflows.models.persistent_volume_claim_template import PersistentVolumeClaimTemplate +from argo_workflows.models.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource +from argo_workflows.models.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource +from argo_workflows.models.pod_affinity import PodAffinity +from argo_workflows.models.pod_affinity_term import PodAffinityTerm +from argo_workflows.models.pod_anti_affinity import PodAntiAffinity +from 
argo_workflows.models.pod_dns_config import PodDNSConfig +from argo_workflows.models.pod_dns_config_option import PodDNSConfigOption +from argo_workflows.models.pod_security_context import PodSecurityContext +from argo_workflows.models.portworx_volume_source import PortworxVolumeSource +from argo_workflows.models.preferred_scheduling_term import PreferredSchedulingTerm +from argo_workflows.models.probe import Probe +from argo_workflows.models.projected_volume_source import ProjectedVolumeSource +from argo_workflows.models.quobyte_volume_source import QuobyteVolumeSource +from argo_workflows.models.rbd_volume_source import RBDVolumeSource +from argo_workflows.models.resource_field_selector import ResourceFieldSelector +from argo_workflows.models.resource_requirements import ResourceRequirements +from argo_workflows.models.se_linux_options import SELinuxOptions +from argo_workflows.models.scale_io_volume_source import ScaleIOVolumeSource +from argo_workflows.models.seccomp_profile import SeccompProfile +from argo_workflows.models.secret_env_source import SecretEnvSource +from argo_workflows.models.secret_key_selector import SecretKeySelector +from argo_workflows.models.secret_projection import SecretProjection +from argo_workflows.models.secret_volume_source import SecretVolumeSource +from argo_workflows.models.security_context import SecurityContext +from argo_workflows.models.sensor_create_sensor_request import SensorCreateSensorRequest +from argo_workflows.models.sensor_log_entry import SensorLogEntry +from argo_workflows.models.sensor_sensor_watch_event import SensorSensorWatchEvent +from argo_workflows.models.sensor_update_sensor_request import SensorUpdateSensorRequest +from argo_workflows.models.service_account_token_projection import ServiceAccountTokenProjection +from argo_workflows.models.service_port import ServicePort +from argo_workflows.models.status_cause import StatusCause +from argo_workflows.models.storage_os_volume_source import 
StorageOSVolumeSource +from argo_workflows.models.stream_result_of_event import StreamResultOfEvent +from argo_workflows.models.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent +from argo_workflows.models.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent +from argo_workflows.models.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry +from argo_workflows.models.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent +from argo_workflows.models.sysctl import Sysctl +from argo_workflows.models.tcp_socket_action import TCPSocketAction +from argo_workflows.models.toleration import Toleration +from argo_workflows.models.typed_local_object_reference import TypedLocalObjectReference +from argo_workflows.models.volume import Volume +from argo_workflows.models.volume_device import VolumeDevice +from argo_workflows.models.volume_mount import VolumeMount +from argo_workflows.models.volume_projection import VolumeProjection +from argo_workflows.models.vsphere_virtual_disk_volume_source import VsphereVirtualDiskVolumeSource +from argo_workflows.models.weighted_pod_affinity_term import WeightedPodAffinityTerm +from argo_workflows.models.windows_security_context_options import WindowsSecurityContextOptions diff --git a/sdks/python/client/argo_workflows/models/affinity.py b/sdks/python/client/argo_workflows/models/affinity.py new file mode 100644 index 000000000000..e8083527b6cf --- /dev/null +++ b/sdks/python/client/argo_workflows/models/affinity.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source 
container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.node_affinity import NodeAffinity +from argo_workflows.models.pod_affinity import PodAffinity +from argo_workflows.models.pod_anti_affinity import PodAntiAffinity +from typing import Optional, Set +from typing_extensions import Self + +class Affinity(BaseModel): + """ + Affinity is a group of affinity scheduling rules. + """ # noqa: E501 + node_affinity: Optional[NodeAffinity] = Field(default=None, alias="nodeAffinity") + pod_affinity: Optional[PodAffinity] = Field(default=None, alias="podAffinity") + pod_anti_affinity: Optional[PodAntiAffinity] = Field(default=None, alias="podAntiAffinity") + __properties: ClassVar[List[str]] = ["nodeAffinity", "podAffinity", "podAntiAffinity"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Affinity from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the 
model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node_affinity + if self.node_affinity: + _dict['nodeAffinity'] = self.node_affinity.to_dict() + # override the default output from pydantic by calling `to_dict()` of pod_affinity + if self.pod_affinity: + _dict['podAffinity'] = self.pod_affinity.to_dict() + # override the default output from pydantic by calling `to_dict()` of pod_anti_affinity + if self.pod_anti_affinity: + _dict['podAntiAffinity'] = self.pod_anti_affinity.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Affinity from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeAffinity": NodeAffinity.from_dict(obj["nodeAffinity"]) if obj.get("nodeAffinity") is not None else None, + "podAffinity": PodAffinity.from_dict(obj["podAffinity"]) if obj.get("podAffinity") is not None else None, + "podAntiAffinity": PodAntiAffinity.from_dict(obj["podAntiAffinity"]) if obj.get("podAntiAffinity") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/aws_elastic_block_store_volume_source.py b/sdks/python/client/argo_workflows/models/aws_elastic_block_store_volume_source.py new file mode 100644 index 000000000000..e1bf5103655e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/aws_elastic_block_store_volume_source.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an 
open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AWSElasticBlockStoreVolumeSource(BaseModel): + """ + Represents a Persistent Disk resource in AWS. An AWS EBS disk must exist before mounting to a container. The disk must also be in the same AWS zone as the kubelet. An AWS EBS disk can only be mounted as read/write once. AWS EBS volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", alias="fsType") + partition: Optional[StrictInt] = Field(default=None, description="The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty).") + read_only: Optional[StrictBool] = Field(default=None, description="Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". 
More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", alias="readOnly") + volume_id: StrictStr = Field(description="Unique ID of the persistent disk resource in AWS (Amazon EBS volume). More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", alias="volumeID") + __properties: ClassVar[List[str]] = ["fsType", "partition", "readOnly", "volumeID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AWSElasticBlockStoreVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AWSElasticBlockStoreVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "partition": obj.get("partition"), + "readOnly": obj.get("readOnly"), + "volumeID": obj.get("volumeID") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/azure_disk_volume_source.py b/sdks/python/client/argo_workflows/models/azure_disk_volume_source.py new file mode 100644 index 000000000000..372442205ce1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/azure_disk_volume_source.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AzureDiskVolumeSource(BaseModel): + """ + AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. 
+ """ # noqa: E501 + caching_mode: Optional[StrictStr] = Field(default=None, description="Host Caching mode: None, Read Only, Read Write.", alias="cachingMode") + disk_name: StrictStr = Field(description="The Name of the data disk in the blob storage", alias="diskName") + disk_uri: StrictStr = Field(description="The URI the data disk in the blob storage", alias="diskURI") + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.", alias="fsType") + kind: Optional[StrictStr] = Field(default=None, description="Expected values Shared: multiple blob disks per storage account Dedicated: single blob disk per storage account Managed: azure managed data disk (only in managed availability set). defaults to shared") + read_only: Optional[StrictBool] = Field(default=None, description="Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.", alias="readOnly") + __properties: ClassVar[List[str]] = ["cachingMode", "diskName", "diskURI", "fsType", "kind", "readOnly"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AzureDiskVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AzureDiskVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cachingMode": obj.get("cachingMode"), + "diskName": obj.get("diskName"), + "diskURI": obj.get("diskURI"), + "fsType": obj.get("fsType"), + "kind": obj.get("kind"), + "readOnly": obj.get("readOnly") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/azure_file_volume_source.py b/sdks/python/client/argo_workflows/models/azure_file_volume_source.py new file mode 100644 index 000000000000..4070657a8cc6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/azure_file_volume_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AzureFileVolumeSource(BaseModel): + """ + AzureFile represents an Azure File Service mount on the host and bind mount to the pod. + """ # noqa: E501 + read_only: Optional[StrictBool] = Field(default=None, description="Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.", alias="readOnly") + secret_name: StrictStr = Field(description="the name of secret that contains Azure Storage Account Name and Key", alias="secretName") + share_name: StrictStr = Field(description="Share Name", alias="shareName") + __properties: ClassVar[List[str]] = ["readOnly", "secretName", "shareName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AzureFileVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AzureFileVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "readOnly": obj.get("readOnly"), + "secretName": obj.get("secretName"), + "shareName": obj.get("shareName") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/capabilities.py b/sdks/python/client/argo_workflows/models/capabilities.py new file mode 100644 index 000000000000..8392ecef39e7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/capabilities.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Capabilities(BaseModel): + """ + Adds and removes POSIX capabilities from running containers. 
+ """ # noqa: E501 + add: Optional[List[StrictStr]] = Field(default=None, description="Added capabilities") + drop: Optional[List[StrictStr]] = Field(default=None, description="Removed capabilities") + __properties: ClassVar[List[str]] = ["add", "drop"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Capabilities from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Capabilities from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "add": obj.get("add"), + "drop": obj.get("drop") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/ceph_fs_volume_source.py b/sdks/python/client/argo_workflows/models/ceph_fs_volume_source.py new file mode 100644 index 000000000000..7f308a5f27ad --- /dev/null +++ b/sdks/python/client/argo_workflows/models/ceph_fs_volume_source.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class CephFSVolumeSource(BaseModel): + """ + Represents a Ceph Filesystem mount that lasts the lifetime of a pod Cephfs volumes do not support ownership management or SELinux relabeling. 
+ """ # noqa: E501 + monitors: List[StrictStr] = Field(description="Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it") + path: Optional[StrictStr] = Field(default=None, description="Optional: Used as the mounted root, rather than the full Ceph tree, default is /") + read_only: Optional[StrictBool] = Field(default=None, description="Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", alias="readOnly") + secret_file: Optional[StrictStr] = Field(default=None, description="Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", alias="secretFile") + secret_ref: Optional[LocalObjectReference] = Field(default=None, alias="secretRef") + user: Optional[StrictStr] = Field(default=None, description="Optional: User is the rados user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it") + __properties: ClassVar[List[str]] = ["monitors", "path", "readOnly", "secretFile", "secretRef", "user"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CephFSVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary 
representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CephFSVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "monitors": obj.get("monitors"), + "path": obj.get("path"), + "readOnly": obj.get("readOnly"), + "secretFile": obj.get("secretFile"), + "secretRef": LocalObjectReference.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None, + "user": obj.get("user") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/cinder_volume_source.py b/sdks/python/client/argo_workflows/models/cinder_volume_source.py new file mode 100644 index 000000000000..65dea612f03e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/cinder_volume_source.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class CinderVolumeSource(BaseModel): + """ + Represents a cinder volume resource in Openstack. A Cinder volume must exist before mounting to a container. The volume must also be in the same region as the kubelet. Cinder volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md", alias="fsType") + read_only: Optional[StrictBool] = Field(default=None, description="Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md", alias="readOnly") + secret_ref: Optional[LocalObjectReference] = Field(default=None, alias="secretRef") + volume_id: StrictStr = Field(description="volume id used to identify the volume in cinder. 
More info: https://examples.k8s.io/mysql-cinder-pd/README.md", alias="volumeID") + __properties: ClassVar[List[str]] = ["fsType", "readOnly", "secretRef", "volumeID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CinderVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CinderVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "readOnly": obj.get("readOnly"), + "secretRef": LocalObjectReference.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None, + "volumeID": obj.get("volumeID") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/config_map_env_source.py b/sdks/python/client/argo_workflows/models/config_map_env_source.py new file mode 100644 index 000000000000..cfd881866fbd --- /dev/null +++ b/sdks/python/client/argo_workflows/models/config_map_env_source.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigMapEnvSource(BaseModel): + """ + ConfigMapEnvSource selects a ConfigMap to populate the environment variables with. 
The contents of the target ConfigMap's Data field will represent the key-value pairs as environment variables. + """ # noqa: E501 + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + optional: Optional[StrictBool] = Field(default=None, description="Specify whether the ConfigMap must be defined") + __properties: ClassVar[List[str]] = ["name", "optional"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigMapEnvSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigMapEnvSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "optional": obj.get("optional") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/config_map_key_selector.py b/sdks/python/client/argo_workflows/models/config_map_key_selector.py new file mode 100644 index 000000000000..85f57dec6f06 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/config_map_key_selector.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigMapKeySelector(BaseModel): + """ + Selects a key from a ConfigMap. + """ # noqa: E501 + key: StrictStr = Field(description="The key to select.") + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + optional: Optional[StrictBool] = Field(default=None, description="Specify whether the ConfigMap or its key must be defined") + __properties: ClassVar[List[str]] = ["key", "name", "optional"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigMapKeySelector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigMapKeySelector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "name": obj.get("name"), + "optional": obj.get("optional") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/config_map_projection.py b/sdks/python/client/argo_workflows/models/config_map_projection.py new file mode 100644 index 000000000000..62012931a27a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/config_map_projection.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.key_to_path import KeyToPath +from typing import Optional, Set +from typing_extensions import Self + +class ConfigMapProjection(BaseModel): + """ + Adapts a ConfigMap into a projected volume. The contents of the target ConfigMap's Data field will be presented in a projected volume as files using the keys in the Data field as the file names, unless the items element is populated with specific mappings of keys to paths. 
Note that this is identical to a configmap volume source without the default mode. + """ # noqa: E501 + items: Optional[List[KeyToPath]] = Field(default=None, description="If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.") + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + optional: Optional[StrictBool] = Field(default=None, description="Specify whether the ConfigMap or its keys must be defined") + __properties: ClassVar[List[str]] = ["items", "name", "optional"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigMapProjection from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigMapProjection from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "items": [KeyToPath.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "name": obj.get("name"), + "optional": obj.get("optional") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/config_map_volume_source.py b/sdks/python/client/argo_workflows/models/config_map_volume_source.py new file mode 100644 index 000000000000..66d25cb801e9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/config_map_volume_source.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.key_to_path import KeyToPath +from typing import Optional, Set +from typing_extensions import Self + +class ConfigMapVolumeSource(BaseModel): + """ + Adapts a ConfigMap into a volume. The contents of the target ConfigMap's Data field will be presented in a volume as files using the keys in the Data field as the file names, unless the items element is populated with specific mappings of keys to paths. ConfigMap volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + default_mode: Optional[StrictInt] = Field(default=None, description="Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.", alias="defaultMode") + items: Optional[List[KeyToPath]] = Field(default=None, description="If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.") + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + optional: Optional[StrictBool] = Field(default=None, description="Specify whether the ConfigMap or its keys must be defined") + __properties: ClassVar[List[str]] = ["defaultMode", "items", "name", "optional"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigMapVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigMapVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "defaultMode": obj.get("defaultMode"), + "items": [KeyToPath.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "name": obj.get("name"), + "optional": obj.get("optional") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/container.py b/sdks/python/client/argo_workflows/models/container.py new file mode 100644 index 000000000000..f3aa0720fcff --- /dev/null +++ b/sdks/python/client/argo_workflows/models/container.py @@ -0,0 +1,211 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.container_port import ContainerPort +from argo_workflows.models.env_from_source import EnvFromSource +from argo_workflows.models.env_var import EnvVar +from argo_workflows.models.lifecycle import Lifecycle +from argo_workflows.models.probe import Probe +from argo_workflows.models.resource_requirements import ResourceRequirements +from argo_workflows.models.security_context import SecurityContext +from argo_workflows.models.volume_device import VolumeDevice +from argo_workflows.models.volume_mount import VolumeMount +from typing import Optional, Set +from typing_extensions import Self + +class Container(BaseModel): + """ + A single application container that you want to run within a pod. + """ # noqa: E501 + args: Optional[List[StrictStr]] = Field(default=None, description="Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell") + command: Optional[List[StrictStr]] = Field(default=None, description="Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. 
If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell") + env: Optional[List[EnvVar]] = Field(default=None, description="List of environment variables to set in the container. Cannot be updated.") + env_from: Optional[List[EnvFromSource]] = Field(default=None, description="List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.", alias="envFrom") + image: StrictStr = Field(description="Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.") + image_pull_policy: Optional[StrictStr] = Field(default=None, description="Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images Possible enum values: - `\"Always\"` means that kubelet always attempts to pull the latest image. Container will fail If the pull fails. - `\"IfNotPresent\"` means that kubelet pulls if the image isn't present on disk. 
Container will fail if the image isn't present and the pull fails. - `\"Never\"` means that kubelet never pulls an image, but only uses a local image. Container will fail if the image isn't present", alias="imagePullPolicy") + lifecycle: Optional[Lifecycle] = None + liveness_probe: Optional[Probe] = Field(default=None, alias="livenessProbe") + name: Optional[StrictStr] = Field(default=None, description="Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.") + ports: Optional[List[ContainerPort]] = Field(default=None, description="List of ports to expose from the container. Exposing a port here gives the system additional information about the network connections a container uses, but is primarily informational. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Cannot be updated.") + readiness_probe: Optional[Probe] = Field(default=None, alias="readinessProbe") + resources: Optional[ResourceRequirements] = None + security_context: Optional[SecurityContext] = Field(default=None, alias="securityContext") + startup_probe: Optional[Probe] = Field(default=None, alias="startupProbe") + stdin: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.") + stdin_once: Optional[StrictBool] = Field(default=None, description="Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. 
If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false", alias="stdinOnce") + termination_message_path: Optional[StrictStr] = Field(default=None, description="Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.", alias="terminationMessagePath") + termination_message_policy: Optional[StrictStr] = Field(default=None, description="Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated. Possible enum values: - `\"FallbackToLogsOnError\"` will read the most recent contents of the container logs for the container status message when the container exits with an error and the terminationMessagePath has no contents. 
- `\"File\"` is the default behavior and will set the container status message to the contents of the container's terminationMessagePath when the container exits.", alias="terminationMessagePolicy") + tty: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.") + volume_devices: Optional[List[VolumeDevice]] = Field(default=None, description="volumeDevices is the list of block devices to be used by the container.", alias="volumeDevices") + volume_mounts: Optional[List[VolumeMount]] = Field(default=None, description="Pod volumes to mount into the container's filesystem. Cannot be updated.", alias="volumeMounts") + working_dir: Optional[StrictStr] = Field(default=None, description="Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.", alias="workingDir") + __properties: ClassVar[List[str]] = ["args", "command", "env", "envFrom", "image", "imagePullPolicy", "lifecycle", "livenessProbe", "name", "ports", "readinessProbe", "resources", "securityContext", "startupProbe", "stdin", "stdinOnce", "terminationMessagePath", "terminationMessagePolicy", "tty", "volumeDevices", "volumeMounts", "workingDir"] + + @field_validator('image_pull_policy') + def image_pull_policy_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Always', 'IfNotPresent', 'Never']): + raise ValueError("must be one of enum values ('Always', 'IfNotPresent', 'Never')") + return value + + @field_validator('termination_message_policy') + def termination_message_policy_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['FallbackToLogsOnError', 'File']): + raise ValueError("must be one of enum values ('FallbackToLogsOnError', 'File')") + return value + + model_config = 
ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Container from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in env (list) + _items = [] + if self.env: + for _item in self.env: + if _item: + _items.append(_item.to_dict()) + _dict['env'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in env_from (list) + _items = [] + if self.env_from: + for _item in self.env_from: + if _item: + _items.append(_item.to_dict()) + _dict['envFrom'] = _items + # override the default output from pydantic by calling `to_dict()` of lifecycle + if self.lifecycle: + _dict['lifecycle'] = self.lifecycle.to_dict() + # override the default output from pydantic by calling `to_dict()` of liveness_probe + if self.liveness_probe: + _dict['livenessProbe'] = self.liveness_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in ports (list) + _items = [] + if self.ports: + for _item in self.ports: + if _item: + _items.append(_item.to_dict()) + _dict['ports'] = _items + # override the default output from pydantic by calling `to_dict()` of readiness_probe + if self.readiness_probe: + _dict['readinessProbe'] = self.readiness_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of resources + if self.resources: + _dict['resources'] = self.resources.to_dict() + # override the default output from pydantic by calling `to_dict()` of security_context + if self.security_context: + _dict['securityContext'] = self.security_context.to_dict() + # override the default output from pydantic by calling `to_dict()` of startup_probe + if self.startup_probe: + _dict['startupProbe'] = self.startup_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in volume_devices (list) + _items = [] + if self.volume_devices: + for _item in 
self.volume_devices: + if _item: + _items.append(_item.to_dict()) + _dict['volumeDevices'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in volume_mounts (list) + _items = [] + if self.volume_mounts: + for _item in self.volume_mounts: + if _item: + _items.append(_item.to_dict()) + _dict['volumeMounts'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Container from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "args": obj.get("args"), + "command": obj.get("command"), + "env": [EnvVar.from_dict(_item) for _item in obj["env"]] if obj.get("env") is not None else None, + "envFrom": [EnvFromSource.from_dict(_item) for _item in obj["envFrom"]] if obj.get("envFrom") is not None else None, + "image": obj.get("image"), + "imagePullPolicy": obj.get("imagePullPolicy"), + "lifecycle": Lifecycle.from_dict(obj["lifecycle"]) if obj.get("lifecycle") is not None else None, + "livenessProbe": Probe.from_dict(obj["livenessProbe"]) if obj.get("livenessProbe") is not None else None, + "name": obj.get("name"), + "ports": [ContainerPort.from_dict(_item) for _item in obj["ports"]] if obj.get("ports") is not None else None, + "readinessProbe": Probe.from_dict(obj["readinessProbe"]) if obj.get("readinessProbe") is not None else None, + "resources": ResourceRequirements.from_dict(obj["resources"]) if obj.get("resources") is not None else None, + "securityContext": SecurityContext.from_dict(obj["securityContext"]) if obj.get("securityContext") is not None else None, + "startupProbe": Probe.from_dict(obj["startupProbe"]) if obj.get("startupProbe") is not None else None, + "stdin": obj.get("stdin"), + "stdinOnce": obj.get("stdinOnce"), + "terminationMessagePath": obj.get("terminationMessagePath"), + "terminationMessagePolicy": obj.get("terminationMessagePolicy"), + 
"tty": obj.get("tty"), + "volumeDevices": [VolumeDevice.from_dict(_item) for _item in obj["volumeDevices"]] if obj.get("volumeDevices") is not None else None, + "volumeMounts": [VolumeMount.from_dict(_item) for _item in obj["volumeMounts"]] if obj.get("volumeMounts") is not None else None, + "workingDir": obj.get("workingDir") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/container_port.py b/sdks/python/client/argo_workflows/models/container_port.py new file mode 100644 index 000000000000..ea394353cfbb --- /dev/null +++ b/sdks/python/client/argo_workflows/models/container_port.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ContainerPort(BaseModel): + """ + ContainerPort represents a network port in a single container. + """ # noqa: E501 + container_port: StrictInt = Field(description="Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536.", alias="containerPort") + host_ip: Optional[StrictStr] = Field(default=None, description="What host IP to bind the external port to.", alias="hostIP") + host_port: Optional[StrictInt] = Field(default=None, description="Number of port to expose on the host. If specified, this must be a valid port number, 0 < x < 65536. 
If HostNetwork is specified, this must match ContainerPort. Most containers do not need this.", alias="hostPort") + name: Optional[StrictStr] = Field(default=None, description="If specified, this must be an IANA_SVC_NAME and unique within the pod. Each named port in a pod must have a unique name. Name for the port that can be referred to by services.") + protocol: Optional[StrictStr] = Field(default=None, description="Protocol for port. Must be UDP, TCP, or SCTP. Defaults to \"TCP\". Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. - `\"UDP\"` is the UDP protocol.") + __properties: ClassVar[List[str]] = ["containerPort", "hostIP", "hostPort", "name", "protocol"] + + @field_validator('protocol') + def protocol_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['SCTP', 'TCP', 'UDP']): + raise ValueError("must be one of enum values ('SCTP', 'TCP', 'UDP')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ContainerPort from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ContainerPort from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "containerPort": obj.get("containerPort"), + "hostIP": obj.get("hostIP"), + "hostPort": obj.get("hostPort"), + "name": obj.get("name"), + "protocol": obj.get("protocol") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/create_options.py b/sdks/python/client/argo_workflows/models/create_options.py new file mode 100644 index 000000000000..e54d7ce1f5bb --- /dev/null +++ b/sdks/python/client/argo_workflows/models/create_options.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CreateOptions(BaseModel): + """ + CreateOptions may be provided when creating an API object. 
+ """ # noqa: E501 + dry_run: Optional[List[StrictStr]] = Field(default=None, alias="dryRun") + field_manager: Optional[StrictStr] = Field(default=None, alias="fieldManager") + field_validation: Optional[StrictStr] = Field(default=None, alias="fieldValidation") + __properties: ClassVar[List[str]] = ["dryRun", "fieldManager", "fieldValidation"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "dryRun": obj.get("dryRun"), + "fieldManager": obj.get("fieldManager"), + "fieldValidation": obj.get("fieldValidation") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/csi_volume_source.py b/sdks/python/client/argo_workflows/models/csi_volume_source.py new file mode 100644 index 000000000000..e5a75e20b46d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/csi_volume_source.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class CSIVolumeSource(BaseModel): + """ + Represents a source location of a volume to mount, managed by an external CSI driver + """ # noqa: E501 + driver: StrictStr = Field(description="Driver is the name of the CSI driver that handles this volume. 
Consult with your admin for the correct name as registered in the cluster.") + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Ex. \"ext4\", \"xfs\", \"ntfs\". If not provided, the empty value is passed to the associated CSI driver which will determine the default filesystem to apply.", alias="fsType") + node_publish_secret_ref: Optional[LocalObjectReference] = Field(default=None, alias="nodePublishSecretRef") + read_only: Optional[StrictBool] = Field(default=None, description="Specifies a read-only configuration for the volume. Defaults to false (read/write).", alias="readOnly") + volume_attributes: Optional[Dict[str, StrictStr]] = Field(default=None, description="VolumeAttributes stores driver-specific properties that are passed to the CSI driver. Consult your driver's documentation for supported values.", alias="volumeAttributes") + __properties: ClassVar[List[str]] = ["driver", "fsType", "nodePublishSecretRef", "readOnly", "volumeAttributes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CSIVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node_publish_secret_ref + if self.node_publish_secret_ref: + _dict['nodePublishSecretRef'] = self.node_publish_secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CSIVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "driver": obj.get("driver"), + "fsType": obj.get("fsType"), + "nodePublishSecretRef": LocalObjectReference.from_dict(obj["nodePublishSecretRef"]) if obj.get("nodePublishSecretRef") is not None else None, + "readOnly": obj.get("readOnly"), + "volumeAttributes": obj.get("volumeAttributes") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/downward_api_projection.py b/sdks/python/client/argo_workflows/models/downward_api_projection.py new file mode 100644 index 000000000000..7c61d7ca1e82 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/downward_api_projection.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.downward_api_volume_file import DownwardAPIVolumeFile +from typing import Optional, Set +from typing_extensions import Self + +class DownwardAPIProjection(BaseModel): + """ + Represents downward API info for projecting into a projected volume. Note that this is identical to a downwardAPI volume source without the default mode. + """ # noqa: E501 + items: Optional[List[DownwardAPIVolumeFile]] = Field(default=None, description="Items is a list of DownwardAPIVolume file") + __properties: ClassVar[List[str]] = ["items"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DownwardAPIProjection from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DownwardAPIProjection from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "items": [DownwardAPIVolumeFile.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/downward_api_volume_file.py b/sdks/python/client/argo_workflows/models/downward_api_volume_file.py new file mode 100644 index 000000000000..94d8bbf9b67e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/downward_api_volume_file.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.object_field_selector import ObjectFieldSelector +from argo_workflows.models.resource_field_selector import ResourceFieldSelector +from typing import Optional, Set +from typing_extensions import Self + +class DownwardAPIVolumeFile(BaseModel): + """ + DownwardAPIVolumeFile represents information to create the file containing the pod field + """ # noqa: E501 + field_ref: Optional[ObjectFieldSelector] = Field(default=None, alias="fieldRef") + mode: Optional[StrictInt] = Field(default=None, description="Optional: mode bits used to set permissions on this file, must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.") + path: StrictStr = Field(description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. 
The first item of the relative path must not start with '..'") + resource_field_ref: Optional[ResourceFieldSelector] = Field(default=None, alias="resourceFieldRef") + __properties: ClassVar[List[str]] = ["fieldRef", "mode", "path", "resourceFieldRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DownwardAPIVolumeFile from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of field_ref + if self.field_ref: + _dict['fieldRef'] = self.field_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of resource_field_ref + if self.resource_field_ref: + _dict['resourceFieldRef'] = self.resource_field_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DownwardAPIVolumeFile from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fieldRef": ObjectFieldSelector.from_dict(obj["fieldRef"]) if obj.get("fieldRef") is not None else None, + "mode": obj.get("mode"), + "path": obj.get("path"), + "resourceFieldRef": ResourceFieldSelector.from_dict(obj["resourceFieldRef"]) if obj.get("resourceFieldRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/downward_api_volume_source.py b/sdks/python/client/argo_workflows/models/downward_api_volume_source.py new file mode 100644 index 000000000000..ea13b03af7b3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/downward_api_volume_source.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.downward_api_volume_file import DownwardAPIVolumeFile +from typing import Optional, Set +from typing_extensions import Self + +class DownwardAPIVolumeSource(BaseModel): + """ + DownwardAPIVolumeSource represents a volume containing downward API info. Downward API volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + default_mode: Optional[StrictInt] = Field(default=None, description="Optional: mode bits to use on created files by default. Must be a Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. 
This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.", alias="defaultMode") + items: Optional[List[DownwardAPIVolumeFile]] = Field(default=None, description="Items is a list of downward API volume file") + __properties: ClassVar[List[str]] = ["defaultMode", "items"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DownwardAPIVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DownwardAPIVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "defaultMode": obj.get("defaultMode"), + "items": [DownwardAPIVolumeFile.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/duration.py b/sdks/python/client/argo_workflows/models/duration.py new file mode 100644 index 000000000000..03cbe10d656b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/duration.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Duration(BaseModel): + """ + Duration is a wrapper around time.Duration which supports correct marshaling to YAML and JSON. 
In particular, it marshals into strings, which can be used as map keys in json. + """ # noqa: E501 + duration: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["duration"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Duration from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Duration from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "duration": obj.get("duration") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/empty_dir_volume_source.py b/sdks/python/client/argo_workflows/models/empty_dir_volume_source.py new file mode 100644 index 000000000000..492d60ce2a99 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/empty_dir_volume_source.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class EmptyDirVolumeSource(BaseModel): + """ + Represents an empty directory for a pod. Empty directory volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + medium: Optional[StrictStr] = Field(default=None, description="What type of storage medium should back this directory. The default is \"\" which means to use the node's default medium. Must be an empty string (default) or Memory. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir") + size_limit: Optional[StrictStr] = Field(default=None, description="Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) 
This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.", alias="sizeLimit") + __properties: ClassVar[List[str]] = ["medium", "sizeLimit"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EmptyDirVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EmptyDirVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "medium": obj.get("medium"), + "sizeLimit": obj.get("sizeLimit") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/env_from_source.py b/sdks/python/client/argo_workflows/models/env_from_source.py new file mode 100644 index 000000000000..787984eb6643 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/env_from_source.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_env_source import ConfigMapEnvSource +from argo_workflows.models.secret_env_source import SecretEnvSource +from typing import Optional, Set +from typing_extensions import Self + +class EnvFromSource(BaseModel): + """ + EnvFromSource represents the source of a set of ConfigMaps + """ # noqa: E501 + config_map_ref: Optional[ConfigMapEnvSource] = Field(default=None, alias="configMapRef") + prefix: Optional[StrictStr] = Field(default=None, description="An optional identifier to prepend to each key in the ConfigMap. 
Must be a C_IDENTIFIER.") + secret_ref: Optional[SecretEnvSource] = Field(default=None, alias="secretRef") + __properties: ClassVar[List[str]] = ["configMapRef", "prefix", "secretRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EnvFromSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config_map_ref + if self.config_map_ref: + _dict['configMapRef'] = self.config_map_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EnvFromSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMapRef": ConfigMapEnvSource.from_dict(obj["configMapRef"]) if obj.get("configMapRef") is not None else None, + "prefix": obj.get("prefix"), + "secretRef": SecretEnvSource.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/env_var.py b/sdks/python/client/argo_workflows/models/env_var.py new file mode 100644 index 000000000000..313e3a9e60bb --- /dev/null +++ b/sdks/python/client/argo_workflows/models/env_var.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.env_var_source import EnvVarSource +from typing import Optional, Set +from typing_extensions import Self + +class EnvVar(BaseModel): + """ + EnvVar represents an environment variable present in a Container. + """ # noqa: E501 + name: StrictStr = Field(description="Name of the environment variable. Must be a C_IDENTIFIER.") + value: Optional[StrictStr] = Field(default=None, description="Variable references $(VAR_NAME) are expanded using the previously defined environment variables in the container and any service environment variables. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. 
Defaults to \"\".") + value_from: Optional[EnvVarSource] = Field(default=None, alias="valueFrom") + __properties: ClassVar[List[str]] = ["name", "value", "valueFrom"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EnvVar from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of value_from + if self.value_from: + _dict['valueFrom'] = self.value_from.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EnvVar from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "value": obj.get("value"), + "valueFrom": EnvVarSource.from_dict(obj["valueFrom"]) if obj.get("valueFrom") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/env_var_source.py b/sdks/python/client/argo_workflows/models/env_var_source.py new file mode 100644 index 000000000000..858e6d892203 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/env_var_source.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from argo_workflows.models.object_field_selector import ObjectFieldSelector +from argo_workflows.models.resource_field_selector import ResourceFieldSelector +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class EnvVarSource(BaseModel): + """ + EnvVarSource represents a source for the value of an EnvVar. + """ # noqa: E501 + config_map_key_ref: Optional[ConfigMapKeySelector] = Field(default=None, alias="configMapKeyRef") + field_ref: Optional[ObjectFieldSelector] = Field(default=None, alias="fieldRef") + resource_field_ref: Optional[ResourceFieldSelector] = Field(default=None, alias="resourceFieldRef") + secret_key_ref: Optional[SecretKeySelector] = Field(default=None, alias="secretKeyRef") + __properties: ClassVar[List[str]] = ["configMapKeyRef", "fieldRef", "resourceFieldRef", "secretKeyRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EnvVarSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config_map_key_ref + if self.config_map_key_ref: + _dict['configMapKeyRef'] = self.config_map_key_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of field_ref + if self.field_ref: + _dict['fieldRef'] = self.field_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of resource_field_ref + if self.resource_field_ref: + _dict['resourceFieldRef'] = self.resource_field_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_key_ref + if self.secret_key_ref: + _dict['secretKeyRef'] = self.secret_key_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EnvVarSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMapKeyRef": ConfigMapKeySelector.from_dict(obj["configMapKeyRef"]) if obj.get("configMapKeyRef") is not None else None, + "fieldRef": ObjectFieldSelector.from_dict(obj["fieldRef"]) if obj.get("fieldRef") is not None else None, + "resourceFieldRef": ResourceFieldSelector.from_dict(obj["resourceFieldRef"]) if obj.get("resourceFieldRef") is not None else None, + "secretKeyRef": SecretKeySelector.from_dict(obj["secretKeyRef"]) if obj.get("secretKeyRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/ephemeral_volume_source.py 
b/sdks/python/client/argo_workflows/models/ephemeral_volume_source.py new file mode 100644 index 000000000000..69d9db4db6c6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/ephemeral_volume_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.persistent_volume_claim_template import PersistentVolumeClaimTemplate +from typing import Optional, Set +from typing_extensions import Self + +class EphemeralVolumeSource(BaseModel): + """ + Represents an ephemeral volume that is handled by a normal storage driver. 
+ """ # noqa: E501 + volume_claim_template: Optional[PersistentVolumeClaimTemplate] = Field(default=None, alias="volumeClaimTemplate") + __properties: ClassVar[List[str]] = ["volumeClaimTemplate"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EphemeralVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of volume_claim_template + if self.volume_claim_template: + _dict['volumeClaimTemplate'] = self.volume_claim_template.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EphemeralVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "volumeClaimTemplate": PersistentVolumeClaimTemplate.from_dict(obj["volumeClaimTemplate"]) if obj.get("volumeClaimTemplate") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/event.py b/sdks/python/client/argo_workflows/models/event.py new file mode 100644 index 000000000000..455264920e8c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/event.py @@ -0,0 +1,139 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.event_series import EventSeries +from argo_workflows.models.event_source import EventSource +from argo_workflows.models.object_meta import ObjectMeta +from argo_workflows.models.object_reference import ObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class Event(BaseModel): + """ + Event is a report of an event somewhere in the cluster. Events have a limited retention time and triggers and messages may evolve with time. Event consumers should not rely on the timing of an event with a given Reason reflecting a consistent underlying trigger, or the continued existence of events with that Reason. Events should be treated as informative, best-effort, supplemental data. + """ # noqa: E501 + action: Optional[StrictStr] = Field(default=None, description="What action was taken/failed regarding to the Regarding object.") + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + count: Optional[StrictInt] = Field(default=None, description="The number of times this event has occurred.") + event_time: Optional[datetime] = Field(default=None, description="MicroTime is version of Time with microsecond level precision.", alias="eventTime") + first_timestamp: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.", alias="firstTimestamp") + involved_object: ObjectReference = Field(alias="involvedObject") + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + last_timestamp: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="lastTimestamp") + message: Optional[StrictStr] = Field(default=None, description="A human-readable description of the status of this operation.") + metadata: ObjectMeta + reason: Optional[StrictStr] = Field(default=None, description="This should be a short, machine understandable string that gives the reason for the transition into the object's current status.") + related: Optional[ObjectReference] = None + reporting_component: Optional[StrictStr] = Field(default=None, description="Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.", alias="reportingComponent") + reporting_instance: Optional[StrictStr] = Field(default=None, description="ID of the controller instance, e.g. 
`kubelet-xyzf`.", alias="reportingInstance") + series: Optional[EventSeries] = None + source: Optional[EventSource] = None + type: Optional[StrictStr] = Field(default=None, description="Type of this event (Normal, Warning), new types could be added in the future") + __properties: ClassVar[List[str]] = ["action", "apiVersion", "count", "eventTime", "firstTimestamp", "involvedObject", "kind", "lastTimestamp", "message", "metadata", "reason", "related", "reportingComponent", "reportingInstance", "series", "source", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Event from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of involved_object + if self.involved_object: + _dict['involvedObject'] = self.involved_object.to_dict() + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of related + if self.related: + _dict['related'] = self.related.to_dict() + # override the default output from pydantic by calling `to_dict()` of series + if self.series: + _dict['series'] = self.series.to_dict() + # override the default output from pydantic by calling `to_dict()` of source + if self.source: + _dict['source'] = self.source.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Event from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "action": obj.get("action"), + "apiVersion": obj.get("apiVersion"), + "count": obj.get("count"), + "eventTime": obj.get("eventTime"), + "firstTimestamp": obj.get("firstTimestamp"), + "involvedObject": ObjectReference.from_dict(obj["involvedObject"]) if obj.get("involvedObject") is not None else None, + "kind": obj.get("kind"), + "lastTimestamp": obj.get("lastTimestamp"), + "message": obj.get("message"), + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "reason": obj.get("reason"), + "related": ObjectReference.from_dict(obj["related"]) if obj.get("related") is not None else None, + "reportingComponent": obj.get("reportingComponent"), + "reportingInstance": obj.get("reportingInstance"), + "series": EventSeries.from_dict(obj["series"]) if obj.get("series") is not None 
else None, + "source": EventSource.from_dict(obj["source"]) if obj.get("source") is not None else None, + "type": obj.get("type") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/event_series.py b/sdks/python/client/argo_workflows/models/event_series.py new file mode 100644 index 000000000000..35b70ee29b7e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/event_series.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class EventSeries(BaseModel): + """ + EventSeries contain information on series of events, i.e. thing that was/is happening continuously for some time. 
+ """ # noqa: E501 + count: Optional[StrictInt] = Field(default=None, description="Number of occurrences in this series up to the last heartbeat time") + last_observed_time: Optional[datetime] = Field(default=None, description="MicroTime is version of Time with microsecond level precision.", alias="lastObservedTime") + __properties: ClassVar[List[str]] = ["count", "lastObservedTime"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EventSeries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EventSeries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "count": obj.get("count"), + "lastObservedTime": obj.get("lastObservedTime") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/event_source.py b/sdks/python/client/argo_workflows/models/event_source.py new file mode 100644 index 000000000000..4391afb0ccf1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/event_source.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class EventSource(BaseModel): + """ + EventSource contains information for an event. 
+ """ # noqa: E501 + component: Optional[StrictStr] = Field(default=None, description="Component from which the event is generated.") + host: Optional[StrictStr] = Field(default=None, description="Node name on which the event is generated.") + __properties: ClassVar[List[str]] = ["component", "host"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "component": obj.get("component"), + "host": obj.get("host") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/eventsource_create_event_source_request.py b/sdks/python/client/argo_workflows/models/eventsource_create_event_source_request.py new file mode 100644 index 000000000000..7d9acbc5c5b7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/eventsource_create_event_source_request.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from typing import Optional, Set +from typing_extensions import Self + +class EventsourceCreateEventSourceRequest(BaseModel): + """ + EventsourceCreateEventSourceRequest + """ # noqa: E501 + event_source: Optional[IoArgoprojEventsV1alpha1EventSource] = Field(default=None, alias="eventSource") + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["eventSource", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EventsourceCreateEventSourceRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of event_source + if self.event_source: + _dict['eventSource'] = self.event_source.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EventsourceCreateEventSourceRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "eventSource": IoArgoprojEventsV1alpha1EventSource.from_dict(obj["eventSource"]) if obj.get("eventSource") is not None else None, + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/eventsource_event_source_watch_event.py b/sdks/python/client/argo_workflows/models/eventsource_event_source_watch_event.py new file mode 100644 index 000000000000..b5bb171a72b7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/eventsource_event_source_watch_event.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from typing import Optional, Set +from typing_extensions import Self + +class EventsourceEventSourceWatchEvent(BaseModel): + """ + EventsourceEventSourceWatchEvent + """ # noqa: E501 + object: Optional[IoArgoprojEventsV1alpha1EventSource] = None + type: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["object", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EventsourceEventSourceWatchEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of object + if self.object: + _dict['object'] = self.object.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EventsourceEventSourceWatchEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "object": IoArgoprojEventsV1alpha1EventSource.from_dict(obj["object"]) if obj.get("object") is not None else None, + "type": obj.get("type") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/eventsource_log_entry.py b/sdks/python/client/argo_workflows/models/eventsource_log_entry.py new file mode 100644 index 000000000000..4601b599a35b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/eventsource_log_entry.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class EventsourceLogEntry(BaseModel): + """ + EventsourceLogEntry + """ # noqa: E501 + event_name: Optional[StrictStr] = Field(default=None, alias="eventName") + event_source_name: Optional[StrictStr] = Field(default=None, alias="eventSourceName") + event_source_type: Optional[StrictStr] = Field(default=None, alias="eventSourceType") + level: Optional[StrictStr] = None + msg: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + time: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.") + __properties: ClassVar[List[str]] = ["eventName", "eventSourceName", "eventSourceType", "level", "msg", "namespace", "time"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EventsourceLogEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EventsourceLogEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "eventName": obj.get("eventName"), + "eventSourceName": obj.get("eventSourceName"), + "eventSourceType": obj.get("eventSourceType"), + "level": obj.get("level"), + "msg": obj.get("msg"), + "namespace": obj.get("namespace"), + "time": obj.get("time") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/eventsource_update_event_source_request.py b/sdks/python/client/argo_workflows/models/eventsource_update_event_source_request.py new file mode 100644 index 000000000000..1abcf19958d0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/eventsource_update_event_source_request.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from typing import Optional, Set +from typing_extensions import Self + +class EventsourceUpdateEventSourceRequest(BaseModel): + """ + EventsourceUpdateEventSourceRequest + """ # noqa: E501 + event_source: Optional[IoArgoprojEventsV1alpha1EventSource] = Field(default=None, alias="eventSource") + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["eventSource", "name", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of EventsourceUpdateEventSourceRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of event_source + if self.event_source: + _dict['eventSource'] = self.event_source.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of EventsourceUpdateEventSourceRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "eventSource": IoArgoprojEventsV1alpha1EventSource.from_dict(obj["eventSource"]) if obj.get("eventSource") is not None else None, + "name": obj.get("name"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/exec_action.py b/sdks/python/client/argo_workflows/models/exec_action.py new file mode 100644 index 000000000000..40109101d05c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/exec_action.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ExecAction(BaseModel): + """ + ExecAction describes a \"run in container\" action. 
+ """ # noqa: E501 + command: Optional[List[StrictStr]] = Field(default=None, description="Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy.") + __properties: ClassVar[List[str]] = ["command"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ExecAction from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ExecAction from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "command": obj.get("command") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/fc_volume_source.py b/sdks/python/client/argo_workflows/models/fc_volume_source.py new file mode 100644 index 000000000000..4a7c02f34329 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/fc_volume_source.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class FCVolumeSource(BaseModel): + """ + Represents a Fibre Channel volume. Fibre Channel volumes can only be mounted as read/write once. Fibre Channel volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". 
Implicitly inferred to be \"ext4\" if unspecified.", alias="fsType") + lun: Optional[StrictInt] = Field(default=None, description="Optional: FC target lun number") + read_only: Optional[StrictBool] = Field(default=None, description="Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.", alias="readOnly") + target_wwns: Optional[List[StrictStr]] = Field(default=None, description="Optional: FC target worldwide names (WWNs)", alias="targetWWNs") + wwids: Optional[List[StrictStr]] = Field(default=None, description="Optional: FC volume world wide identifiers (wwids) Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously.") + __properties: ClassVar[List[str]] = ["fsType", "lun", "readOnly", "targetWWNs", "wwids"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FCVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FCVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "lun": obj.get("lun"), + "readOnly": obj.get("readOnly"), + "targetWWNs": obj.get("targetWWNs"), + "wwids": obj.get("wwids") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/flex_volume_source.py b/sdks/python/client/argo_workflows/models/flex_volume_source.py new file mode 100644 index 000000000000..19319dae01b0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/flex_volume_source.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class FlexVolumeSource(BaseModel): + """ + FlexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. 
+ """ # noqa: E501 + driver: StrictStr = Field(description="Driver is the name of the driver to use for this volume.") + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". The default filesystem depends on FlexVolume script.", alias="fsType") + options: Optional[Dict[str, StrictStr]] = Field(default=None, description="Optional: Extra command options if any.") + read_only: Optional[StrictBool] = Field(default=None, description="Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.", alias="readOnly") + secret_ref: Optional[LocalObjectReference] = Field(default=None, alias="secretRef") + __properties: ClassVar[List[str]] = ["driver", "fsType", "options", "readOnly", "secretRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FlexVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FlexVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "driver": obj.get("driver"), + "fsType": obj.get("fsType"), + "options": obj.get("options"), + "readOnly": obj.get("readOnly"), + "secretRef": LocalObjectReference.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/flocker_volume_source.py b/sdks/python/client/argo_workflows/models/flocker_volume_source.py new file mode 100644 index 000000000000..cd7b81ed6930 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/flocker_volume_source.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class FlockerVolumeSource(BaseModel): + """ + Represents a Flocker volume mounted by the Flocker agent. 
One and only one of datasetName and datasetUUID should be set. Flocker volumes do not support ownership management or SELinux relabeling. + """ # noqa: E501 + dataset_name: Optional[StrictStr] = Field(default=None, description="Name of the dataset stored as metadata -> name on the dataset for Flocker should be considered as deprecated", alias="datasetName") + dataset_uuid: Optional[StrictStr] = Field(default=None, description="UUID of the dataset. This is unique identifier of a Flocker dataset", alias="datasetUUID") + __properties: ClassVar[List[str]] = ["datasetName", "datasetUUID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FlockerVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FlockerVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "datasetName": obj.get("datasetName"), + "datasetUUID": obj.get("datasetUUID") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/gce_persistent_disk_volume_source.py b/sdks/python/client/argo_workflows/models/gce_persistent_disk_volume_source.py new file mode 100644 index 000000000000..4298ec6ef8ac --- /dev/null +++ b/sdks/python/client/argo_workflows/models/gce_persistent_disk_volume_source.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GCEPersistentDiskVolumeSource(BaseModel): + """ + Represents a Persistent Disk resource in Google Compute Engine. A GCE PD must exist before mounting to a container. The disk must also be in the same GCE project and zone as the kubelet. A GCE PD can only be mounted as read/write once or read-only many times. GCE PDs support ownership management and SELinux relabeling. 
+ """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", alias="fsType") + partition: Optional[StrictInt] = Field(default=None, description="The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty). More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk") + pd_name: StrictStr = Field(description="Unique name of the PD resource in GCE. Used to identify the disk in GCE. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", alias="pdName") + read_only: Optional[StrictBool] = Field(default=None, description="ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", alias="readOnly") + __properties: ClassVar[List[str]] = ["fsType", "partition", "pdName", "readOnly"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GCEPersistentDiskVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GCEPersistentDiskVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "partition": obj.get("partition"), + "pdName": obj.get("pdName"), + "readOnly": obj.get("readOnly") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/git_repo_volume_source.py b/sdks/python/client/argo_workflows/models/git_repo_volume_source.py new file mode 100644 index 000000000000..c847e7c21377 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/git_repo_volume_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GitRepoVolumeSource(BaseModel): + """ + Represents a volume that is populated with the contents of a git repository. Git repo volumes do not support ownership management. Git repo volumes support SELinux relabeling. DEPRECATED: GitRepo is deprecated. 
To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container. + """ # noqa: E501 + directory: Optional[StrictStr] = Field(default=None, description="Target directory name. Must not contain or start with '..'. If '.' is supplied, the volume directory will be the git repository. Otherwise, if specified, the volume will contain the git repository in the subdirectory with the given name.") + repository: StrictStr = Field(description="Repository URL") + revision: Optional[StrictStr] = Field(default=None, description="Commit hash for the specified revision.") + __properties: ClassVar[List[str]] = ["directory", "repository", "revision"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GitRepoVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GitRepoVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "directory": obj.get("directory"), + "repository": obj.get("repository"), + "revision": obj.get("revision") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/glusterfs_volume_source.py b/sdks/python/client/argo_workflows/models/glusterfs_volume_source.py new file mode 100644 index 000000000000..c0916a007ab0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/glusterfs_volume_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GlusterfsVolumeSource(BaseModel): + """ + Represents a Glusterfs mount that lasts the lifetime of a pod. Glusterfs volumes do not support ownership management or SELinux relabeling. + """ # noqa: E501 + endpoints: StrictStr = Field(description="EndpointsName is the endpoint name that details Glusterfs topology. 
More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod") + path: StrictStr = Field(description="Path is the Glusterfs volume path. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod") + read_only: Optional[StrictBool] = Field(default=None, description="ReadOnly here will force the Glusterfs volume to be mounted with read-only permissions. Defaults to false. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod", alias="readOnly") + __properties: ClassVar[List[str]] = ["endpoints", "path", "readOnly"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GlusterfsVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GlusterfsVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "endpoints": obj.get("endpoints"), + "path": obj.get("path"), + "readOnly": obj.get("readOnly") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/google_protobuf_any.py b/sdks/python/client/argo_workflows/models/google_protobuf_any.py new file mode 100644 index 000000000000..9b0b4ee47213 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/google_protobuf_any.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class GoogleProtobufAny(BaseModel): + """ + GoogleProtobufAny + """ # noqa: E501 + type_url: Optional[StrictStr] = None + value: Optional[Union[Annotated[bytes, Field(strict=True)], Annotated[str, Field(strict=True)]]] = None + __properties: ClassVar[List[str]] = ["type_url", "value"] + + @field_validator('value') + def value_validate_regular_expression(cls, value): + """Validates the regular expression""" + if value is None: + return value + + if not re.match(r"^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$", value): + raise ValueError(r"must validate the regular expression /^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleProtobufAny from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleProtobufAny from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "type_url": obj.get("type_url"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/group_version_resource.py b/sdks/python/client/argo_workflows/models/group_version_resource.py new file mode 100644 index 000000000000..b022068d4b56 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/group_version_resource.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
# NOTE(review): OpenAPI-generated models (openapi-generator 7.4.0); prefer
# regenerating over hand-editing.
class GroupVersionResource(BaseModel):
    """
    +protobuf.options.(gogoproto.goproto_stringer)=false
    """ # noqa: E501
    group: Optional[StrictStr] = None
    resource: Optional[StrictStr] = None
    version: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["group", "resource", "version"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into a GroupVersionResource instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias, dropping ``None`` fields."""
        omit: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a GroupVersionResource instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "group": obj.get("group"),
            "resource": obj.get("resource"),
            "version": obj.get("version"),
        })


class GRPCAction(BaseModel):
    """
    GRPCAction
    """ # noqa: E501
    port: StrictInt = Field(description="Port number of the gRPC service. Number must be in the range 1 to 65535.")
    service: Optional[StrictStr] = Field(default=None, description="Service is the name of the service to place in the gRPC HealthCheckRequest (see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). If this is not specified, the default behavior is defined by gRPC.")
    __properties: ClassVar[List[str]] = ["port", "service"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into a GRPCAction instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias, dropping ``None`` fields."""
        omit: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a GRPCAction instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "port": obj.get("port"),
            "service": obj.get("service"),
        })
# NOTE(review): OpenAPI-generated model (openapi-generator 7.4.0); prefer
# regenerating over hand-editing.
class GrpcGatewayRuntimeError(BaseModel):
    """
    GrpcGatewayRuntimeError
    """ # noqa: E501
    code: Optional[StrictInt] = None
    details: Optional[List[GoogleProtobufAny]] = None
    error: Optional[StrictStr] = None
    message: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["code", "details", "error", "message"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into a GrpcGatewayRuntimeError instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias.

        ``None``-valued fields are dropped, and each entry of ``details`` is
        serialized through its own ``to_dict()`` rather than pydantic's default.
        """
        omit: Set[str] = set()
        data = self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )
        if self.details:
            data['details'] = [entry.to_dict() for entry in self.details if entry]
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a GrpcGatewayRuntimeError instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        raw_details = obj.get("details")
        return cls.model_validate({
            "code": obj.get("code"),
            "details": None if raw_details is None else [GoogleProtobufAny.from_dict(entry) for entry in raw_details],
            "error": obj.get("error"),
            "message": obj.get("message"),
        })
# NOTE(review): OpenAPI-generated model (openapi-generator 7.4.0); prefer
# regenerating over hand-editing.
class GrpcGatewayRuntimeStreamError(BaseModel):
    """
    GrpcGatewayRuntimeStreamError
    """ # noqa: E501
    details: Optional[List[GoogleProtobufAny]] = None
    grpc_code: Optional[StrictInt] = None
    http_code: Optional[StrictInt] = None
    http_status: Optional[StrictStr] = None
    message: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["details", "grpc_code", "http_code", "http_status", "message"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into a GrpcGatewayRuntimeStreamError instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias.

        ``None``-valued fields are dropped, and each entry of ``details`` is
        serialized through its own ``to_dict()`` rather than pydantic's default.
        """
        omit: Set[str] = set()
        data = self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )
        if self.details:
            data['details'] = [entry.to_dict() for entry in self.details if entry]
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a GrpcGatewayRuntimeStreamError instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        raw_details = obj.get("details")
        return cls.model_validate({
            "details": None if raw_details is None else [GoogleProtobufAny.from_dict(entry) for entry in raw_details],
            "grpc_code": obj.get("grpc_code"),
            "http_code": obj.get("http_code"),
            "http_status": obj.get("http_status"),
            "message": obj.get("message"),
        })
# NOTE(review): OpenAPI-generated models (openapi-generator 7.4.0); prefer
# regenerating over hand-editing.
class HostAlias(BaseModel):
    """
    HostAlias holds the mapping between IP and hostnames that will be injected as an entry in the pod's hosts file.
    """ # noqa: E501
    hostnames: Optional[List[StrictStr]] = Field(default=None, description="Hostnames for the above IP address.")
    ip: Optional[StrictStr] = Field(default=None, description="IP address of the host file entry.")
    __properties: ClassVar[List[str]] = ["hostnames", "ip"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into a HostAlias instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias, dropping ``None`` fields."""
        omit: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a HostAlias instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "hostnames": obj.get("hostnames"),
            "ip": obj.get("ip"),
        })


class HostPathVolumeSource(BaseModel):
    """
    Represents a host path mapped into a pod. Host path volumes do not support ownership management or SELinux relabeling.
    """ # noqa: E501
    path: StrictStr = Field(description="Path of the directory on the host. If the path is a symlink, it will follow the link to the real path. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath")
    type: Optional[StrictStr] = Field(default=None, description="Type for HostPath Volume Defaults to \"\" More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath")
    __properties: ClassVar[List[str]] = ["path", "type"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into a HostPathVolumeSource instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias, dropping ``None`` fields."""
        omit: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a HostPathVolumeSource instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "path": obj.get("path"),
            "type": obj.get("type"),
        })


class HTTPGetAction(BaseModel):
    """
    HTTPGetAction describes an action based on HTTP Get requests.
    """ # noqa: E501
    host: Optional[StrictStr] = Field(default=None, description="Host name to connect to, defaults to the pod IP. You probably want to set \"Host\" in httpHeaders instead.")
    http_headers: Optional[List[HTTPHeader]] = Field(default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", alias="httpHeaders")
    path: Optional[StrictStr] = Field(default=None, description="Path to access on the HTTP server.")
    port: StrictStr
    scheme: Optional[StrictStr] = Field(default=None, description="Scheme to use for connecting to the host. Defaults to HTTP. Possible enum values: - `\"HTTP\"` means that the scheme used will be http:// - `\"HTTPS\"` means that the scheme used will be https://")
    __properties: ClassVar[List[str]] = ["host", "httpHeaders", "path", "port", "scheme"]

    @field_validator('scheme')
    def scheme_validate_enum(cls, v):
        """Restrict ``scheme`` to the allowed enum members (``None`` is allowed)."""
        if v is None:
            return v
        if v not in set(['HTTP', 'HTTPS']):
            raise ValueError("must be one of enum values ('HTTP', 'HTTPS')")
        return v

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an HTTPGetAction instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias.

        ``None``-valued fields are dropped, and each entry of ``http_headers``
        is serialized through its own ``to_dict()`` rather than pydantic's default.
        """
        omit: Set[str] = set()
        data = self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )
        if self.http_headers:
            data['httpHeaders'] = [entry.to_dict() for entry in self.http_headers if entry]
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an HTTPGetAction instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        raw_headers = obj.get("httpHeaders")
        return cls.model_validate({
            "host": obj.get("host"),
            "httpHeaders": None if raw_headers is None else [HTTPHeader.from_dict(entry) for entry in raw_headers],
            "path": obj.get("path"),
            "port": obj.get("port"),
            "scheme": obj.get("scheme"),
        })
# NOTE(review): OpenAPI-generated models (openapi-generator 7.4.0); prefer
# regenerating over hand-editing.
class HTTPHeader(BaseModel):
    """
    HTTPHeader describes a custom header to be used in HTTP probes
    """ # noqa: E501
    name: StrictStr = Field(description="The header field name")
    value: StrictStr = Field(description="The header field value")
    __properties: ClassVar[List[str]] = ["name", "value"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an HTTPHeader instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias, dropping ``None`` fields."""
        omit: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an HTTPHeader instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "name": obj.get("name"),
            "value": obj.get("value"),
        })


class IoArgoprojEventsV1alpha1Amount(BaseModel):
    """
    Amount represent a numeric amount.
    """ # noqa: E501
    value: Optional[Union[Annotated[bytes, Field(strict=True)], Annotated[str, Field(strict=True)]]] = None
    __properties: ClassVar[List[str]] = ["value"]

    @field_validator('value')
    def value_validate_regular_expression(cls, v):
        """Ensure ``value`` is valid base64 text (``None`` is allowed)."""
        if v is None:
            return v
        if not re.match(r"^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$", v):
            raise ValueError(r"must validate the regular expression /^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/")
        return v

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojEventsV1alpha1Amount instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias, dropping ``None`` fields."""
        omit: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojEventsV1alpha1Amount instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "value": obj.get("value"),
        })
# NOTE(review): OpenAPI-generated model (openapi-generator 7.4.0); prefer
# regenerating over hand-editing.
class IoArgoprojEventsV1alpha1AMQPConsumeConfig(BaseModel):
    """
    IoArgoprojEventsV1alpha1AMQPConsumeConfig
    """ # noqa: E501
    auto_ack: Optional[StrictBool] = Field(default=None, alias="autoAck")
    consumer_tag: Optional[StrictStr] = Field(default=None, alias="consumerTag")
    exclusive: Optional[StrictBool] = None
    no_local: Optional[StrictBool] = Field(default=None, alias="noLocal")
    no_wait: Optional[StrictBool] = Field(default=None, alias="noWait")
    __properties: ClassVar[List[str]] = ["autoAck", "consumerTag", "exclusive", "noLocal", "noWait"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojEventsV1alpha1AMQPConsumeConfig instance."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the model to a dict keyed by alias, dropping ``None`` fields."""
        omit: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=omit,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojEventsV1alpha1AMQPConsumeConfig instance from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "autoAck": obj.get("autoAck"),
            "consumerTag": obj.get("consumerTag"),
            "exclusive": obj.get("exclusive"),
            "noLocal": obj.get("noLocal"),
            "noWait": obj.get("noWait"),
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_consume_config import IoArgoprojEventsV1alpha1AMQPConsumeConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_bind_config import IoArgoprojEventsV1alpha1AMQPQueueBindConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AMQPEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1AMQPEventSource + """ # noqa: E501 + auth: Optional[IoArgoprojEventsV1alpha1BasicAuth] = None + connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff") + consume: Optional[IoArgoprojEventsV1alpha1AMQPConsumeConfig] = None + exchange_declare: Optional[IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig] = Field(default=None, alias="exchangeDeclare") + exchange_name: Optional[StrictStr] = Field(default=None, alias="exchangeName") + exchange_type: Optional[StrictStr] = Field(default=None, 
alias="exchangeType") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + queue_bind: Optional[IoArgoprojEventsV1alpha1AMQPQueueBindConfig] = Field(default=None, alias="queueBind") + queue_declare: Optional[IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig] = Field(default=None, alias="queueDeclare") + routing_key: Optional[StrictStr] = Field(default=None, alias="routingKey") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + url: Optional[StrictStr] = None + url_secret: Optional[SecretKeySelector] = Field(default=None, alias="urlSecret") + __properties: ClassVar[List[str]] = ["auth", "connectionBackoff", "consume", "exchangeDeclare", "exchangeName", "exchangeType", "filter", "jsonBody", "metadata", "queueBind", "queueDeclare", "routingKey", "tls", "url", "urlSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AMQPEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth + if self.auth: + _dict['auth'] = self.auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of connection_backoff + if self.connection_backoff: + _dict['connectionBackoff'] = self.connection_backoff.to_dict() + # override the default output from pydantic by calling `to_dict()` of consume + if self.consume: + _dict['consume'] = self.consume.to_dict() + # override the default output from pydantic by calling `to_dict()` of exchange_declare + if self.exchange_declare: + _dict['exchangeDeclare'] = self.exchange_declare.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of queue_bind + if self.queue_bind: + _dict['queueBind'] = self.queue_bind.to_dict() + # override the default output from pydantic by calling `to_dict()` of queue_declare + if self.queue_declare: + _dict['queueDeclare'] = self.queue_declare.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + # override the default output from pydantic by calling `to_dict()` of url_secret + if self.url_secret: + _dict['urlSecret'] = self.url_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AMQPEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "auth": IoArgoprojEventsV1alpha1BasicAuth.from_dict(obj["auth"]) if obj.get("auth") is not None else None, + "connectionBackoff": 
IoArgoprojEventsV1alpha1Backoff.from_dict(obj["connectionBackoff"]) if obj.get("connectionBackoff") is not None else None, + "consume": IoArgoprojEventsV1alpha1AMQPConsumeConfig.from_dict(obj["consume"]) if obj.get("consume") is not None else None, + "exchangeDeclare": IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.from_dict(obj["exchangeDeclare"]) if obj.get("exchangeDeclare") is not None else None, + "exchangeName": obj.get("exchangeName"), + "exchangeType": obj.get("exchangeType"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "queueBind": IoArgoprojEventsV1alpha1AMQPQueueBindConfig.from_dict(obj["queueBind"]) if obj.get("queueBind") is not None else None, + "queueDeclare": IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.from_dict(obj["queueDeclare"]) if obj.get("queueDeclare") is not None else None, + "routingKey": obj.get("routingKey"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "url": obj.get("url"), + "urlSecret": SecretKeySelector.from_dict(obj["urlSecret"]) if obj.get("urlSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py new file mode 100644 index 000000000000..4931c16e1d31 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
class IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig(BaseModel):
    """Options used when declaring an AMQP exchange (wire names are camelCase aliases)."""  # noqa: E501
    auto_delete: Optional[StrictBool] = Field(default=None, alias="autoDelete")
    durable: Optional[StrictBool] = None
    internal: Optional[StrictBool] = None
    no_wait: Optional[StrictBool] = Field(default=None, alias="noWait")
    __properties: ClassVar[List[str]] = ["autoDelete", "durable", "internal", "noWait"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Render the model as a pretty-printed string, keyed by field alias."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Render the model as a JSON string, keyed by field alias."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Render the model as a dict keyed by field alias.

        Unlike a bare ``self.model_dump(by_alias=True)``, ``None`` survives
        only for nullable fields that were set at initialization; all other
        ``None``-valued fields are omitted.
        """
        excluded_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        # Re-key explicitly by alias so unknown keys are dropped rather than validated.
        return cls.model_validate({
            "autoDelete": obj.get("autoDelete"),
            "durable": obj.get("durable"),
            "internal": obj.get("internal"),
            "noWait": obj.get("noWait"),
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AMQPQueueBindConfig(BaseModel): + """ + IoArgoprojEventsV1alpha1AMQPQueueBindConfig + """ # noqa: E501 + no_wait: Optional[StrictBool] = Field(default=None, alias="noWait") + __properties: ClassVar[List[str]] = ["noWait"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AMQPQueueBindConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AMQPQueueBindConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "noWait": obj.get("noWait") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py new file mode 100644 index 000000000000..4241674f6ebd --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig(BaseModel): + """ + IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig + """ # noqa: E501 + arguments: Optional[StrictStr] = None + auto_delete: Optional[StrictBool] = Field(default=None, alias="autoDelete") + durable: Optional[StrictBool] = None + exclusive: Optional[StrictBool] = None + name: Optional[StrictStr] = None + no_wait: Optional[StrictBool] = Field(default=None, alias="noWait") + __properties: ClassVar[List[str]] = ["arguments", "autoDelete", "durable", "exclusive", "name", "noWait"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "arguments": obj.get("arguments"), + "autoDelete": obj.get("autoDelete"), + "durable": obj.get("durable"), + "exclusive": obj.get("exclusive"), + "name": obj.get("name"), + "noWait": obj.get("noWait") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_argo_workflow_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_argo_workflow_trigger.py new file mode 100644 index 000000000000..4b44242b0569 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_argo_workflow_trigger.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ArgoWorkflowTrigger(BaseModel): + """ + IoArgoprojEventsV1alpha1ArgoWorkflowTrigger + """ # noqa: E501 + args: Optional[List[StrictStr]] = None + operation: Optional[StrictStr] = None + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + source: Optional[IoArgoprojEventsV1alpha1ArtifactLocation] = None + __properties: ClassVar[List[str]] = ["args", "operation", "parameters", "source"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ArgoWorkflowTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of source + if self.source: + _dict['source'] = self.source.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ArgoWorkflowTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "args": obj.get("args"), + "operation": obj.get("operation"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "source": IoArgoprojEventsV1alpha1ArtifactLocation.from_dict(obj["source"]) if obj.get("source") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_artifact_location.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_artifact_location.py new file mode 100644 index 000000000000..533fe9a6ad5d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_artifact_location.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
class IoArgoprojEventsV1alpha1ArtifactLocation(BaseModel):
    """Location of an artifact; exactly which backend is used depends on which field is set."""  # noqa: E501
    configmap: Optional[ConfigMapKeySelector] = None
    file: Optional[IoArgoprojEventsV1alpha1FileArtifact] = None
    git: Optional[IoArgoprojEventsV1alpha1GitArtifact] = None
    inline: Optional[StrictStr] = None
    resource: Optional[IoArgoprojEventsV1alpha1Resource] = None
    s3: Optional[IoArgoprojEventsV1alpha1S3Artifact] = None
    url: Optional[IoArgoprojEventsV1alpha1URLArtifact] = None
    __properties: ClassVar[List[str]] = ["configmap", "file", "git", "inline", "resource", "s3", "url"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Render the model as a pretty-printed string, keyed by field alias."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Render the model as a JSON string, keyed by field alias."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1ArtifactLocation from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Render the model as a dict keyed by field alias.

        Unlike a bare ``self.model_dump(by_alias=True)``, ``None`` survives
        only for nullable fields that were set at initialization; all other
        ``None``-valued fields are omitted.
        """
        excluded_fields: Set[str] = set()
        serialized = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Re-serialize nested models through their own to_dict() so their
        # alias/None handling matches this model's; field names equal their
        # wire aliases here, so one loop covers all nested fields.
        for attr in ("configmap", "file", "git", "resource", "s3", "url"):
            value = getattr(self, attr)
            if value:
                serialized[attr] = value.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1ArtifactLocation from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "configmap": ConfigMapKeySelector.from_dict(obj["configmap"]) if obj.get("configmap") is not None else None,
            "file": IoArgoprojEventsV1alpha1FileArtifact.from_dict(obj["file"]) if obj.get("file") is not None else None,
            "git": IoArgoprojEventsV1alpha1GitArtifact.from_dict(obj["git"]) if obj.get("git") is not None else None,
            "inline": obj.get("inline"),
            "resource": IoArgoprojEventsV1alpha1Resource.from_dict(obj["resource"]) if obj.get("resource") is not None else None,
            "s3": IoArgoprojEventsV1alpha1S3Artifact.from_dict(obj["s3"]) if obj.get("s3") is not None else None,
            "url": IoArgoprojEventsV1alpha1URLArtifact.from_dict(obj["url"]) if obj.get("url") is not None else None,
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AWSLambdaTrigger(BaseModel): + """ + IoArgoprojEventsV1alpha1AWSLambdaTrigger + """ # noqa: E501 + access_key: Optional[SecretKeySelector] = Field(default=None, alias="accessKey") + function_name: Optional[StrictStr] = Field(default=None, description="FunctionName refers to the name of the function to invoke.", alias="functionName") + invocation_type: Optional[StrictStr] = Field(default=None, description="Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. * DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. 
+optional", alias="invocationType") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Payload is the list of key-value extracted from an event payload to construct the request payload.") + region: Optional[StrictStr] = None + role_arn: Optional[StrictStr] = Field(default=None, alias="roleARN") + secret_key: Optional[SecretKeySelector] = Field(default=None, alias="secretKey") + __properties: ClassVar[List[str]] = ["accessKey", "functionName", "invocationType", "parameters", "payload", "region", "roleARN", "secretKey"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AWSLambdaTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_key + if self.access_key: + _dict['accessKey'] = self.access_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + # override the default output from pydantic by calling `to_dict()` of secret_key + if self.secret_key: + _dict['secretKey'] = self.secret_key.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AWSLambdaTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessKey": SecretKeySelector.from_dict(obj["accessKey"]) if obj.get("accessKey") is not None else None, + "functionName": obj.get("functionName"), + "invocationType": obj.get("invocationType"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "region": obj.get("region"), + "roleARN": obj.get("roleARN"), + "secretKey": SecretKeySelector.from_dict(obj["secretKey"]) if obj.get("secretKey") is not None else None + }) + return _obj + + diff --git 
a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py new file mode 100644 index 000000000000..90e21d66ccaa --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AzureEventHubsTrigger(BaseModel): + """ + IoArgoprojEventsV1alpha1AzureEventHubsTrigger + """ # noqa: E501 + fqdn: Optional[StrictStr] = None + hub_name: Optional[StrictStr] = Field(default=None, alias="hubName") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Payload is the list of key-value extracted from an event payload to construct the request payload.") + shared_access_key: Optional[SecretKeySelector] = Field(default=None, alias="sharedAccessKey") + shared_access_key_name: Optional[SecretKeySelector] = Field(default=None, 
alias="sharedAccessKeyName") + __properties: ClassVar[List[str]] = ["fqdn", "hubName", "parameters", "payload", "sharedAccessKey", "sharedAccessKeyName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureEventHubsTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + # override the default output from pydantic by calling `to_dict()` of shared_access_key + if self.shared_access_key: + _dict['sharedAccessKey'] = self.shared_access_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of shared_access_key_name + if self.shared_access_key_name: + _dict['sharedAccessKeyName'] = self.shared_access_key_name.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureEventHubsTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fqdn": obj.get("fqdn"), + "hubName": obj.get("hubName"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "sharedAccessKey": SecretKeySelector.from_dict(obj["sharedAccessKey"]) if obj.get("sharedAccessKey") is not None else None, + "sharedAccessKeyName": SecretKeySelector.from_dict(obj["sharedAccessKeyName"]) if obj.get("sharedAccessKeyName") is not None else None + }) + return _obj + + diff --git 
a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py new file mode 100644 index 000000000000..8ad3fe8315c9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AzureEventsHubEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1AzureEventsHubEventSource + """ # noqa: E501 + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + fqdn: Optional[StrictStr] = None + hub_name: Optional[StrictStr] = Field(default=None, alias="hubName") + metadata: Optional[Dict[str, StrictStr]] = None + shared_access_key: Optional[SecretKeySelector] = Field(default=None, alias="sharedAccessKey") + shared_access_key_name: Optional[SecretKeySelector] = Field(default=None, alias="sharedAccessKeyName") + __properties: ClassVar[List[str]] = ["filter", "fqdn", "hubName", "metadata", "sharedAccessKey", "sharedAccessKeyName"] + + 
model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureEventsHubEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of shared_access_key + if self.shared_access_key: + _dict['sharedAccessKey'] = self.shared_access_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of shared_access_key_name + if self.shared_access_key_name: + _dict['sharedAccessKeyName'] = self.shared_access_key_name.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureEventsHubEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "fqdn": obj.get("fqdn"), + "hubName": obj.get("hubName"), + "metadata": obj.get("metadata"), + "sharedAccessKey": SecretKeySelector.from_dict(obj["sharedAccessKey"]) if obj.get("sharedAccessKey") is not None else None, + "sharedAccessKeyName": SecretKeySelector.from_dict(obj["sharedAccessKeyName"]) if obj.get("sharedAccessKeyName") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py new file mode 100644 index 000000000000..9dd6b98e6a43 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source 
container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AzureQueueStorageEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1AzureQueueStorageEventSource + """ # noqa: E501 + connection_string: Optional[SecretKeySelector] = Field(default=None, alias="connectionString") + decode_message: Optional[StrictBool] = Field(default=None, alias="decodeMessage") + dlq: Optional[StrictBool] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + queue_name: Optional[StrictStr] = Field(default=None, alias="queueName") + storage_account_name: Optional[StrictStr] = Field(default=None, alias="storageAccountName") + wait_time_in_seconds: Optional[StrictInt] = Field(default=None, alias="waitTimeInSeconds") + __properties: ClassVar[List[str]] = ["connectionString", "decodeMessage", "dlq", "filter", "jsonBody", "metadata", "queueName", "storageAccountName", "waitTimeInSeconds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the 
string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureQueueStorageEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of connection_string + if self.connection_string: + _dict['connectionString'] = self.connection_string.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureQueueStorageEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "connectionString": SecretKeySelector.from_dict(obj["connectionString"]) if obj.get("connectionString") is not None else None, + "decodeMessage": obj.get("decodeMessage"), + "dlq": obj.get("dlq"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") 
is not None else None, + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "queueName": obj.get("queueName"), + "storageAccountName": obj.get("storageAccountName"), + "waitTimeInSeconds": obj.get("waitTimeInSeconds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py new file mode 100644 index 000000000000..136938316b18 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AzureServiceBusEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1AzureServiceBusEventSource + """ # noqa: E501 + connection_string: Optional[SecretKeySelector] = Field(default=None, alias="connectionString") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + fully_qualified_namespace: Optional[StrictStr] = Field(default=None, alias="fullyQualifiedNamespace") + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + queue_name: Optional[StrictStr] = Field(default=None, alias="queueName") + subscription_name: Optional[StrictStr] = Field(default=None, alias="subscriptionName") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + topic_name: Optional[StrictStr] = Field(default=None, alias="topicName") + __properties: ClassVar[List[str]] = ["connectionString", "filter", "fullyQualifiedNamespace", "jsonBody", "metadata", "queueName", "subscriptionName", "tls", "topicName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # 
TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureServiceBusEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of connection_string + if self.connection_string: + _dict['connectionString'] = self.connection_string.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureServiceBusEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "connectionString": SecretKeySelector.from_dict(obj["connectionString"]) if obj.get("connectionString") is not None else None, + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "fullyQualifiedNamespace": obj.get("fullyQualifiedNamespace"), + "jsonBody": obj.get("jsonBody"), + "metadata": 
obj.get("metadata"), + "queueName": obj.get("queueName"), + "subscriptionName": obj.get("subscriptionName"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "topicName": obj.get("topicName") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py new file mode 100644 index 000000000000..24e938764e43 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py @@ -0,0 +1,122 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1AzureServiceBusTrigger(BaseModel): + """ + IoArgoprojEventsV1alpha1AzureServiceBusTrigger + """ # noqa: E501 + connection_string: Optional[SecretKeySelector] = Field(default=None, alias="connectionString") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Payload is the list of key-value extracted from an event payload to construct the request payload.") + queue_name: Optional[StrictStr] = Field(default=None, alias="queueName") + subscription_name: Optional[StrictStr] = Field(default=None, alias="subscriptionName") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + topic_name: Optional[StrictStr] = Field(default=None, alias="topicName") + __properties: ClassVar[List[str]] = ["connectionString", "parameters", "payload", "queueName", "subscriptionName", "tls", "topicName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, 
exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureServiceBusTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of connection_string + if self.connection_string: + _dict['connectionString'] = self.connection_string.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1AzureServiceBusTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "connectionString": SecretKeySelector.from_dict(obj["connectionString"]) if 
obj.get("connectionString") is not None else None, + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "queueName": obj.get("queueName"), + "subscriptionName": obj.get("subscriptionName"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "topicName": obj.get("topicName") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_backoff.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_backoff.py new file mode 100644 index 000000000000..6e66df118d75 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_backoff.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount +from argo_workflows.models.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Backoff(BaseModel): + """ + IoArgoprojEventsV1alpha1Backoff + """ # noqa: E501 + duration: Optional[IoArgoprojEventsV1alpha1Int64OrString] = None + factor: Optional[IoArgoprojEventsV1alpha1Amount] = None + jitter: Optional[IoArgoprojEventsV1alpha1Amount] = None + steps: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["duration", "factor", "jitter", "steps"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Backoff from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of duration + if self.duration: + _dict['duration'] = self.duration.to_dict() + # override the default output from pydantic by calling `to_dict()` of factor + if self.factor: + _dict['factor'] = self.factor.to_dict() + # override the default output from pydantic by calling `to_dict()` of jitter + if self.jitter: + _dict['jitter'] = self.jitter.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Backoff from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "duration": IoArgoprojEventsV1alpha1Int64OrString.from_dict(obj["duration"]) if obj.get("duration") is not None else None, + "factor": IoArgoprojEventsV1alpha1Amount.from_dict(obj["factor"]) if obj.get("factor") is not None else None, + "jitter": IoArgoprojEventsV1alpha1Amount.from_dict(obj["jitter"]) if obj.get("jitter") is not None else None, + "steps": obj.get("steps") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_basic_auth.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_basic_auth.py new file mode 100644 index 000000000000..11a46f6ee61c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_basic_auth.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1BasicAuth(BaseModel): + """ + IoArgoprojEventsV1alpha1BasicAuth + """ # noqa: E501 + password: Optional[SecretKeySelector] = None + username: Optional[SecretKeySelector] = None + __properties: ClassVar[List[str]] = ["password", "username"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BasicAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password + if self.password: + _dict['password'] = self.password.to_dict() + # override the default output from pydantic by calling `to_dict()` of username + if self.username: + _dict['username'] = self.username.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BasicAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "password": SecretKeySelector.from_dict(obj["password"]) if obj.get("password") is not None else None, + "username": SecretKeySelector.from_dict(obj["username"]) if obj.get("username") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_auth.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_auth.py new file mode 100644 index 000000000000..454161db5e04 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_auth.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1BitbucketAuth(BaseModel): + """ + IoArgoprojEventsV1alpha1BitbucketAuth + """ # noqa: E501 + basic: Optional[IoArgoprojEventsV1alpha1BitbucketBasicAuth] = None + oauth_token: Optional[SecretKeySelector] = Field(default=None, alias="oauthToken") + __properties: ClassVar[List[str]] = ["basic", "oauthToken"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of basic + if self.basic: + _dict['basic'] = self.basic.to_dict() + # override the default output from pydantic by calling `to_dict()` of oauth_token + if self.oauth_token: + _dict['oauthToken'] = self.oauth_token.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "basic": IoArgoprojEventsV1alpha1BitbucketBasicAuth.from_dict(obj["basic"]) if obj.get("basic") is not None else None, + "oauthToken": SecretKeySelector.from_dict(obj["oauthToken"]) if obj.get("oauthToken") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py new file mode 100644 index 000000000000..91f29932e7f5 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1BitbucketBasicAuth(BaseModel): + """ + IoArgoprojEventsV1alpha1BitbucketBasicAuth + """ # noqa: E501 + password: Optional[SecretKeySelector] = None + username: Optional[SecretKeySelector] = None + __properties: ClassVar[List[str]] = ["password", "username"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketBasicAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password + if self.password: + _dict['password'] = self.password.to_dict() + # override the default output from pydantic by calling `to_dict()` of username + if self.username: + _dict['username'] = self.username.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketBasicAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "password": SecretKeySelector.from_dict(obj["password"]) if obj.get("password") is not None else None, + "username": SecretKeySelector.from_dict(obj["username"]) if obj.get("username") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_event_source.py new file mode 100644 index 000000000000..81a173eb99b3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_event_source.py @@ -0,0 +1,125 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1BitbucketEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1BitbucketEventSource + """ # noqa: E501 + auth: Optional[IoArgoprojEventsV1alpha1BitbucketAuth] = None + delete_hook_on_finish: Optional[StrictBool] = Field(default=None, alias="deleteHookOnFinish") + events: Optional[List[StrictStr]] = Field(default=None, description="Events this webhook is subscribed to.") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + metadata: Optional[Dict[str, StrictStr]] = None + owner: Optional[StrictStr] = None + project_key: Optional[StrictStr] = Field(default=None, alias="projectKey") + repositories: Optional[List[IoArgoprojEventsV1alpha1BitbucketRepository]] = None + repository_slug: Optional[StrictStr] = Field(default=None, alias="repositorySlug") + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + __properties: ClassVar[List[str]] = ["auth", "deleteHookOnFinish", "events", "filter", "metadata", "owner", "projectKey", "repositories", "repositorySlug", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + 
"""Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth + if self.auth: + _dict['auth'] = self.auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in repositories (list) + _items = [] + if self.repositories: + for _item in self.repositories: + if _item: + _items.append(_item.to_dict()) + _dict['repositories'] = _items + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return 
cls.model_validate(obj) + + _obj = cls.model_validate({ + "auth": IoArgoprojEventsV1alpha1BitbucketAuth.from_dict(obj["auth"]) if obj.get("auth") is not None else None, + "deleteHookOnFinish": obj.get("deleteHookOnFinish"), + "events": obj.get("events"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "metadata": obj.get("metadata"), + "owner": obj.get("owner"), + "projectKey": obj.get("projectKey"), + "repositories": [IoArgoprojEventsV1alpha1BitbucketRepository.from_dict(_item) for _item in obj["repositories"]] if obj.get("repositories") is not None else None, + "repositorySlug": obj.get("repositorySlug"), + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_repository.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_repository.py new file mode 100644 index 000000000000..a9a869ab7a6f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_repository.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1BitbucketRepository(BaseModel): + """ + IoArgoprojEventsV1alpha1BitbucketRepository + """ # noqa: E501 + owner: Optional[StrictStr] = None + repository_slug: Optional[StrictStr] = Field(default=None, alias="repositorySlug") + __properties: ClassVar[List[str]] = ["owner", "repositorySlug"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "owner": obj.get("owner"), + "repositorySlug": obj.get("repositorySlug") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py new file mode 100644 index 000000000000..550da8b36aa4 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py @@ -0,0 +1,136 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1BitbucketServerEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1BitbucketServerEventSource + """ # noqa: E501 + access_token: Optional[SecretKeySelector] = Field(default=None, alias="accessToken") + bitbucketserver_base_url: Optional[StrictStr] = Field(default=None, alias="bitbucketserverBaseURL") + delete_hook_on_finish: Optional[StrictBool] = Field(default=None, alias="deleteHookOnFinish") + events: Optional[List[StrictStr]] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + metadata: Optional[Dict[str, StrictStr]] = None + project_key: Optional[StrictStr] = Field(default=None, alias="projectKey") + repositories: Optional[List[IoArgoprojEventsV1alpha1BitbucketServerRepository]] = None + repository_slug: Optional[StrictStr] = Field(default=None, alias="repositorySlug") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + webhook_secret: Optional[SecretKeySelector] = Field(default=None, alias="webhookSecret") + __properties: ClassVar[List[str]] = ["accessToken", 
"bitbucketserverBaseURL", "deleteHookOnFinish", "events", "filter", "metadata", "projectKey", "repositories", "repositorySlug", "tls", "webhook", "webhookSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketServerEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_token + if self.access_token: + _dict['accessToken'] = self.access_token.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in repositories (list) + _items = [] + if self.repositories: + for _item in self.repositories: + if _item: + _items.append(_item.to_dict()) + _dict['repositories'] = _items + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook_secret + if self.webhook_secret: + _dict['webhookSecret'] = self.webhook_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketServerEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessToken": SecretKeySelector.from_dict(obj["accessToken"]) if obj.get("accessToken") is not None else None, + "bitbucketserverBaseURL": obj.get("bitbucketserverBaseURL"), + "deleteHookOnFinish": obj.get("deleteHookOnFinish"), + "events": obj.get("events"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "metadata": obj.get("metadata"), + "projectKey": obj.get("projectKey"), + "repositories": 
[IoArgoprojEventsV1alpha1BitbucketServerRepository.from_dict(_item) for _item in obj["repositories"]] if obj.get("repositories") is not None else None, + "repositorySlug": obj.get("repositorySlug"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None, + "webhookSecret": SecretKeySelector.from_dict(obj["webhookSecret"]) if obj.get("webhookSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_repository.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_repository.py new file mode 100644 index 000000000000..3b86555daf86 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_bitbucket_server_repository.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1BitbucketServerRepository(BaseModel): + """ + IoArgoprojEventsV1alpha1BitbucketServerRepository + """ # noqa: E501 + project_key: Optional[StrictStr] = Field(default=None, alias="projectKey") + repository_slug: Optional[StrictStr] = Field(default=None, alias="repositorySlug") + __properties: ClassVar[List[str]] = ["projectKey", "repositorySlug"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketServerRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1BitbucketServerRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "projectKey": obj.get("projectKey"), + "repositorySlug": obj.get("repositorySlug") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_calendar_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_calendar_event_source.py new file mode 100644 index 000000000000..fb544eaa7a21 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_calendar_event_source.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1CalendarEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1CalendarEventSource + """ # noqa: E501 + exclusion_dates: Optional[List[StrictStr]] = Field(default=None, description="ExclusionDates defines the list of DATE-TIME exceptions for recurring events.", alias="exclusionDates") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + interval: Optional[StrictStr] = None + metadata: Optional[Dict[str, StrictStr]] = None + persistence: Optional[IoArgoprojEventsV1alpha1EventPersistence] = None + schedule: Optional[StrictStr] = None + timezone: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["exclusionDates", "filter", "interval", "metadata", "persistence", "schedule", "timezone"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1CalendarEventSource from a JSON string""" + return 
cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of persistence + if self.persistence: + _dict['persistence'] = self.persistence.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1CalendarEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "exclusionDates": obj.get("exclusionDates"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "interval": obj.get("interval"), + "metadata": obj.get("metadata"), + "persistence": IoArgoprojEventsV1alpha1EventPersistence.from_dict(obj["persistence"]) if obj.get("persistence") is not None else None, + "schedule": obj.get("schedule"), + "timezone": obj.get("timezone") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_catchup_configuration.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_catchup_configuration.py new file mode 100644 index 000000000000..5d86048d7aff --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_catchup_configuration.py 
@@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1CatchupConfiguration(BaseModel): + """ + IoArgoprojEventsV1alpha1CatchupConfiguration + """ # noqa: E501 + enabled: Optional[StrictBool] = None + max_duration: Optional[StrictStr] = Field(default=None, alias="maxDuration") + __properties: ClassVar[List[str]] = ["enabled", "maxDuration"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1CatchupConfiguration from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1CatchupConfiguration from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "enabled": obj.get("enabled"), + "maxDuration": obj.get("maxDuration") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_condition.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_condition.py new file mode 100644 index 000000000000..248b066c26ae --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_condition.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Condition(BaseModel): + """ + IoArgoprojEventsV1alpha1Condition + """ # noqa: E501 + last_transition_time: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="lastTransitionTime") + message: Optional[StrictStr] = None + reason: Optional[StrictStr] = None + status: Optional[StrictStr] = None + type: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["lastTransitionTime", "message", "reason", "status", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Condition from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Condition from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "lastTransitionTime": obj.get("lastTransitionTime"), + "message": obj.get("message"), + "reason": obj.get("reason"), + "status": obj.get("status"), + "type": obj.get("type") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_by_time.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_by_time.py new file mode 100644 index 000000000000..0cd8d066a687 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_by_time.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ConditionsResetByTime(BaseModel): + """ + IoArgoprojEventsV1alpha1ConditionsResetByTime + """ # noqa: E501 + cron: Optional[StrictStr] = None + timezone: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["cron", "timezone"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ConditionsResetByTime from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ConditionsResetByTime from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cron": obj.get("cron"), + "timezone": obj.get("timezone") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_criteria.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_criteria.py new file mode 100644 index 000000000000..64ae54aaa422 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_conditions_reset_criteria.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ConditionsResetCriteria(BaseModel): + """ + IoArgoprojEventsV1alpha1ConditionsResetCriteria + """ # noqa: E501 + by_time: Optional[IoArgoprojEventsV1alpha1ConditionsResetByTime] = Field(default=None, alias="byTime") + __properties: ClassVar[List[str]] = ["byTime"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ConditionsResetCriteria from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of by_time + if self.by_time: + _dict['byTime'] = self.by_time.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ConditionsResetCriteria from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "byTime": IoArgoprojEventsV1alpha1ConditionsResetByTime.from_dict(obj["byTime"]) if obj.get("byTime") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_config_map_persistence.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_config_map_persistence.py new file mode 100644 index 000000000000..5cb78eb3812c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_config_map_persistence.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ConfigMapPersistence(BaseModel): + """ + IoArgoprojEventsV1alpha1ConfigMapPersistence + """ # noqa: E501 + create_if_not_exist: Optional[StrictBool] = Field(default=None, alias="createIfNotExist") + name: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["createIfNotExist", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ConfigMapPersistence from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ConfigMapPersistence from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "createIfNotExist": obj.get("createIfNotExist"), + "name": obj.get("name") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_custom_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_custom_trigger.py new file mode 100644 index 000000000000..a9f44ccffe9d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_custom_trigger.py @@ -0,0 +1,118 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1CustomTrigger(BaseModel): + """ + CustomTrigger refers to the specification of the custom trigger. 
+ """ # noqa: E501 + cert_secret: Optional[SecretKeySelector] = Field(default=None, alias="certSecret") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Parameters is the list of parameters that is applied to resolved custom trigger trigger object.") + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Payload is the list of key-value extracted from an event payload to construct the request payload.") + secure: Optional[StrictBool] = None + server_name_override: Optional[StrictStr] = Field(default=None, description="ServerNameOverride for the secure connection between sensor and custom trigger gRPC server.", alias="serverNameOverride") + server_url: Optional[StrictStr] = Field(default=None, alias="serverURL") + spec: Optional[Dict[str, StrictStr]] = Field(default=None, description="Spec is the custom trigger resource specification that custom trigger gRPC server knows how to interpret.") + __properties: ClassVar[List[str]] = ["certSecret", "parameters", "payload", "secure", "serverNameOverride", "serverURL", "spec"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1CustomTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of cert_secret + if self.cert_secret: + _dict['certSecret'] = self.cert_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1CustomTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "certSecret": SecretKeySelector.from_dict(obj["certSecret"]) if obj.get("certSecret") is not None else None, + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "secure": obj.get("secure"), + "serverNameOverride": obj.get("serverNameOverride"), + "serverURL": obj.get("serverURL"), + "spec": obj.get("spec") + }) + return _obj + + diff --git 
a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_data_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_data_filter.py new file mode 100644 index 000000000000..9348157d9236 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_data_filter.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1DataFilter(BaseModel): + """ + IoArgoprojEventsV1alpha1DataFilter + """ # noqa: E501 + comparator: Optional[StrictStr] = Field(default=None, description="Comparator compares the event data with a user given value. Can be \">=\", \">\", \"=\", \"!=\", \"<\", or \"<=\". Is optional, and if left blank treated as equality \"=\".") + path: Optional[StrictStr] = Field(default=None, description="Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. 
See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.") + template: Optional[StrictStr] = None + type: Optional[StrictStr] = None + value: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["comparator", "path", "template", "type", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1DataFilter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1DataFilter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "comparator": obj.get("comparator"), + "path": obj.get("path"), + "template": obj.get("template"), + "type": obj.get("type"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_email_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_email_trigger.py new file mode 100644 index 000000000000..b5b1cf406712 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_email_trigger.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1EmailTrigger(BaseModel): + """ + EmailTrigger refers to the specification of the email notification trigger. + """ # noqa: E501 + body: Optional[StrictStr] = None + var_from: Optional[StrictStr] = Field(default=None, alias="from") + host: Optional[StrictStr] = Field(default=None, description="Host refers to the smtp host url to which email is send.") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + port: Optional[StrictInt] = None + smtp_password: Optional[SecretKeySelector] = Field(default=None, alias="smtpPassword") + subject: Optional[StrictStr] = None + to: Optional[List[StrictStr]] = None + username: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["body", "from", "host", "parameters", "port", "smtpPassword", "subject", "to", "username"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EmailTrigger from a JSON string""" + return 
cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of smtp_password + if self.smtp_password: + _dict['smtpPassword'] = self.smtp_password.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EmailTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "body": obj.get("body"), + "from": obj.get("from"), + "host": obj.get("host"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "port": obj.get("port"), + "smtpPassword": SecretKeySelector.from_dict(obj["smtpPassword"]) if obj.get("smtpPassword") is not None else None, + "subject": obj.get("subject"), + "to": obj.get("to"), + "username": obj.get("username") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_emitter_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_emitter_event_source.py new file mode 100644 index 
class IoArgoprojEventsV1alpha1EmitterEventSource(BaseModel):
    """
    IoArgoprojEventsV1alpha1EmitterEventSource — generated API model.
    """  # noqa: E501
    broker: Optional[StrictStr] = Field(default=None, description="Broker URI to connect to.")
    channel_key: Optional[StrictStr] = Field(default=None, alias="channelKey")
    channel_name: Optional[StrictStr] = Field(default=None, alias="channelName")
    connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff")
    filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None
    json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody")
    metadata: Optional[Dict[str, StrictStr]] = None
    password: Optional[SecretKeySelector] = None
    tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None
    username: Optional[SecretKeySelector] = None
    __properties: ClassVar[List[str]] = ["broker", "channelKey", "channelName", "connectionBackoff", "filter", "jsonBody", "metadata", "password", "tls", "username"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-printed representation of the model, keyed by field alias."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string keyed by field alias."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojEventsV1alpha1EmitterEventSource."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Alias-keyed dict of the model.

        Differs from a bare ``model_dump(by_alias=True)`` in that fields still
        ``None`` are dropped and nested models are rendered through their own
        ``to_dict()`` so their serialization rules apply.
        """
        excluded_fields: Set[str] = set()
        result = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Nested models re-serialize via their own to_dict().
        for alias, nested in (
            ("connectionBackoff", self.connection_backoff),
            ("filter", self.filter),
            ("password", self.password),
            ("tls", self.tls),
            ("username", self.username),
        ):
            if nested:
                result[alias] = nested.to_dict()
        return result

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojEventsV1alpha1EmitterEventSource from an alias-keyed dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-mapping inputs are handed straight to pydantic validation.
            return cls.model_validate(obj)

        def nested(key, model):
            # Route nested mappings through the nested model's from_dict().
            raw = obj.get(key)
            return model.from_dict(raw) if raw is not None else None

        return cls.model_validate({
            "broker": obj.get("broker"),
            "channelKey": obj.get("channelKey"),
            "channelName": obj.get("channelName"),
            "connectionBackoff": nested("connectionBackoff", IoArgoprojEventsV1alpha1Backoff),
            "filter": nested("filter", IoArgoprojEventsV1alpha1EventSourceFilter),
            "jsonBody": obj.get("jsonBody"),
            "metadata": obj.get("metadata"),
            "password": nested("password", SecretKeySelector),
            "tls": nested("tls", IoArgoprojEventsV1alpha1TLSConfig),
            "username": nested("username", SecretKeySelector),
        })
class IoArgoprojEventsV1alpha1EventContext(BaseModel):
    """
    IoArgoprojEventsV1alpha1EventContext — generated API model.
    """  # noqa: E501
    datacontenttype: Optional[StrictStr] = Field(default=None, description="DataContentType - A MIME (RFC2046) string describing the media type of `data`.")
    id: Optional[StrictStr] = Field(default=None, description="ID of the event; must be non-empty and unique within the scope of the producer.")
    source: Optional[StrictStr] = Field(default=None, description="Source - A URI describing the event producer.")
    specversion: Optional[StrictStr] = Field(default=None, description="SpecVersion - The version of the CloudEvents specification used by the io.argoproj.workflow.v1alpha1.")
    subject: Optional[StrictStr] = None
    time: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON.  Wrappers are provided for many of the factory methods that the time package offers.")
    type: Optional[StrictStr] = Field(default=None, description="Type - The type of the occurrence which has happened.")
    __properties: ClassVar[List[str]] = ["datacontenttype", "id", "source", "specversion", "subject", "time", "type"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-printed representation of the model, keyed by field alias."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string keyed by field alias."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojEventsV1alpha1EventContext."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Alias-keyed dict of the model; fields still ``None`` are omitted."""
        excluded_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojEventsV1alpha1EventContext from an alias-keyed dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-mapping inputs are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "datacontenttype": obj.get("datacontenttype"),
            "id": obj.get("id"),
            "source": obj.get("source"),
            "specversion": obj.get("specversion"),
            "subject": obj.get("subject"),
            "time": obj.get("time"),
            "type": obj.get("type"),
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_filter import IoArgoprojEventsV1alpha1EventDependencyFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1EventDependency(BaseModel): + """ + IoArgoprojEventsV1alpha1EventDependency + """ # noqa: E501 + event_name: Optional[StrictStr] = Field(default=None, alias="eventName") + event_source_name: Optional[StrictStr] = Field(default=None, alias="eventSourceName") + filters: Optional[IoArgoprojEventsV1alpha1EventDependencyFilter] = None + filters_logical_operator: Optional[StrictStr] = Field(default=None, description="FiltersLogicalOperator defines how different filters are evaluated together. 
Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).", alias="filtersLogicalOperator") + name: Optional[StrictStr] = None + transform: Optional[IoArgoprojEventsV1alpha1EventDependencyTransformer] = None + __properties: ClassVar[List[str]] = ["eventName", "eventSourceName", "filters", "filtersLogicalOperator", "name", "transform"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventDependency from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filters + if self.filters: + _dict['filters'] = self.filters.to_dict() + # override the default output from pydantic by calling `to_dict()` of transform + if self.transform: + _dict['transform'] = self.transform.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventDependency from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "eventName": obj.get("eventName"), + "eventSourceName": obj.get("eventSourceName"), + "filters": IoArgoprojEventsV1alpha1EventDependencyFilter.from_dict(obj["filters"]) if obj.get("filters") is not None else None, + "filtersLogicalOperator": obj.get("filtersLogicalOperator"), + "name": obj.get("name"), + "transform": IoArgoprojEventsV1alpha1EventDependencyTransformer.from_dict(obj["transform"]) if obj.get("transform") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_dependency_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_dependency_filter.py new file mode 100644 index 000000000000..6c085396b501 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_dependency_filter.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
class IoArgoprojEventsV1alpha1EventDependencyFilter(BaseModel):
    """
    EventDependencyFilter defines filters and constraints for a io.argoproj.workflow.v1alpha1.
    """  # noqa: E501
    context: Optional[IoArgoprojEventsV1alpha1EventContext] = None
    data: Optional[List[IoArgoprojEventsV1alpha1DataFilter]] = None
    data_logical_operator: Optional[StrictStr] = Field(default=None, description="DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).", alias="dataLogicalOperator")
    expr_logical_operator: Optional[StrictStr] = Field(default=None, description="ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).", alias="exprLogicalOperator")
    exprs: Optional[List[IoArgoprojEventsV1alpha1ExprFilter]] = Field(default=None, description="Exprs contains the list of expressions evaluated against the event payload.")
    script: Optional[StrictStr] = Field(default=None, description="Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1.")
    time: Optional[IoArgoprojEventsV1alpha1TimeFilter] = None
    __properties: ClassVar[List[str]] = ["context", "data", "dataLogicalOperator", "exprLogicalOperator", "exprs", "script", "time"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-printed representation of the model, keyed by field alias."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string keyed by field alias."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojEventsV1alpha1EventDependencyFilter."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Alias-keyed dict of the model.

        Fields still ``None`` are omitted; nested models and nested model
        lists are rendered through their own ``to_dict()``.
        """
        excluded_fields: Set[str] = set()
        result = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Single nested models re-serialize via their own to_dict().
        for alias, nested in (("context", self.context), ("time", self.time)):
            if nested:
                result[alias] = nested.to_dict()
        # Lists of nested models do the same, element by element.
        for alias, elements in (("data", self.data), ("exprs", self.exprs)):
            if elements:
                result[alias] = [element.to_dict() for element in elements if element]
        return result

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojEventsV1alpha1EventDependencyFilter from an alias-keyed dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-mapping inputs are handed straight to pydantic validation.
            return cls.model_validate(obj)

        def nested(key, model):
            # Route a nested mapping through the nested model's from_dict().
            raw = obj.get(key)
            return model.from_dict(raw) if raw is not None else None

        def nested_list(key, model):
            # Same, but for a list of nested mappings.
            raw = obj.get(key)
            return [model.from_dict(element) for element in raw] if raw is not None else None

        return cls.model_validate({
            "context": nested("context", IoArgoprojEventsV1alpha1EventContext),
            "data": nested_list("data", IoArgoprojEventsV1alpha1DataFilter),
            "dataLogicalOperator": obj.get("dataLogicalOperator"),
            "exprLogicalOperator": obj.get("exprLogicalOperator"),
            "exprs": nested_list("exprs", IoArgoprojEventsV1alpha1ExprFilter),
            "script": obj.get("script"),
            "time": nested("time", IoArgoprojEventsV1alpha1TimeFilter),
        })
class IoArgoprojEventsV1alpha1EventDependencyTransformer(BaseModel):
    """
    IoArgoprojEventsV1alpha1EventDependencyTransformer — generated API model.
    """  # noqa: E501
    jq: Optional[StrictStr] = None
    script: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["jq", "script"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-printed representation of the model, keyed by field alias."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string keyed by field alias."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojEventsV1alpha1EventDependencyTransformer."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Alias-keyed dict of the model; fields still ``None`` are omitted."""
        excluded_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojEventsV1alpha1EventDependencyTransformer from an alias-keyed dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-mapping inputs are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "jq": obj.get("jq"),
            "script": obj.get("script"),
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_catchup_configuration import IoArgoprojEventsV1alpha1CatchupConfiguration +from argo_workflows.models.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1EventPersistence(BaseModel): + """ + IoArgoprojEventsV1alpha1EventPersistence + """ # noqa: E501 + catchup: Optional[IoArgoprojEventsV1alpha1CatchupConfiguration] = None + config_map: Optional[IoArgoprojEventsV1alpha1ConfigMapPersistence] = Field(default=None, alias="configMap") + __properties: ClassVar[List[str]] = ["catchup", "configMap"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventPersistence from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of catchup + if self.catchup: + _dict['catchup'] = self.catchup.to_dict() + # override the default output from pydantic by calling `to_dict()` of config_map + if self.config_map: + _dict['configMap'] = self.config_map.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventPersistence from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "catchup": IoArgoprojEventsV1alpha1CatchupConfiguration.from_dict(obj["catchup"]) if obj.get("catchup") is not None else None, + "configMap": IoArgoprojEventsV1alpha1ConfigMapPersistence.from_dict(obj["configMap"]) if obj.get("configMap") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source.py new file mode 100644 index 000000000000..1f3d27b3ab9c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1EventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1EventSource + """ # noqa: E501 + metadata: Optional[ObjectMeta] = None + spec: Optional[IoArgoprojEventsV1alpha1EventSourceSpec] = None + status: Optional[IoArgoprojEventsV1alpha1EventSourceStatus] = None + __properties: ClassVar[List[str]] = ["metadata", "spec", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": IoArgoprojEventsV1alpha1EventSourceSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None, + "status": IoArgoprojEventsV1alpha1EventSourceStatus.from_dict(obj["status"]) if obj.get("status") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_filter.py new file mode 100644 index 000000000000..5a6931ea3071 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_filter.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
class IoArgoprojEventsV1alpha1EventSourceFilter(BaseModel):
    """
    IoArgoprojEventsV1alpha1EventSourceFilter — generated API model.
    """  # noqa: E501
    expression: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["expression"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-printed representation of the model, keyed by field alias."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string keyed by field alias."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojEventsV1alpha1EventSourceFilter."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Alias-keyed dict of the model; fields still ``None`` are omitted."""
        excluded_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojEventsV1alpha1EventSourceFilter from an alias-keyed dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-mapping inputs are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "expression": obj.get("expression"),
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.models.list_meta import ListMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1EventSourceList(BaseModel): + """ + IoArgoprojEventsV1alpha1EventSourceList + """ # noqa: E501 + items: Optional[List[IoArgoprojEventsV1alpha1EventSource]] = None + metadata: Optional[ListMeta] = None + __properties: ClassVar[List[str]] = ["items", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSourceList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSourceList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "items": [IoArgoprojEventsV1alpha1EventSource.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "metadata": ListMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_spec.py new file mode 100644 index 000000000000..1bb8e2109a63 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_spec.py @@ -0,0 +1,566 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_events_hub_event_source import IoArgoprojEventsV1alpha1AzureEventsHubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_event_source import IoArgoprojEventsV1alpha1BitbucketEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_emitter_event_source import IoArgoprojEventsV1alpha1EmitterEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource +from 
argo_workflows.models.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_event_source import IoArgoprojEventsV1alpha1KafkaEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource +from argo_workflows.models.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_stream_event_source import IoArgoprojEventsV1alpha1RedisStreamEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact +from argo_workflows.models.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service +from argo_workflows.models.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource +from 
argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1EventSourceSpec(BaseModel): + """ + IoArgoprojEventsV1alpha1EventSourceSpec + """ # noqa: E501 + amqp: Optional[Dict[str, IoArgoprojEventsV1alpha1AMQPEventSource]] = None + azure_events_hub: Optional[Dict[str, IoArgoprojEventsV1alpha1AzureEventsHubEventSource]] = Field(default=None, alias="azureEventsHub") + azure_queue_storage: Optional[Dict[str, IoArgoprojEventsV1alpha1AzureQueueStorageEventSource]] = Field(default=None, alias="azureQueueStorage") + azure_service_bus: Optional[Dict[str, IoArgoprojEventsV1alpha1AzureServiceBusEventSource]] = Field(default=None, alias="azureServiceBus") + bitbucket: Optional[Dict[str, IoArgoprojEventsV1alpha1BitbucketEventSource]] = None + bitbucketserver: Optional[Dict[str, IoArgoprojEventsV1alpha1BitbucketServerEventSource]] = None + calendar: Optional[Dict[str, IoArgoprojEventsV1alpha1CalendarEventSource]] = None + emitter: Optional[Dict[str, IoArgoprojEventsV1alpha1EmitterEventSource]] = None + event_bus_name: Optional[StrictStr] = Field(default=None, alias="eventBusName") + file: Optional[Dict[str, IoArgoprojEventsV1alpha1FileEventSource]] = None + generic: Optional[Dict[str, IoArgoprojEventsV1alpha1GenericEventSource]] = None + gerrit: Optional[Dict[str, IoArgoprojEventsV1alpha1GerritEventSource]] = None + github: Optional[Dict[str, IoArgoprojEventsV1alpha1GithubEventSource]] = None + gitlab: Optional[Dict[str, 
IoArgoprojEventsV1alpha1GitlabEventSource]] = None + hdfs: Optional[Dict[str, IoArgoprojEventsV1alpha1HDFSEventSource]] = None + kafka: Optional[Dict[str, IoArgoprojEventsV1alpha1KafkaEventSource]] = None + minio: Optional[Dict[str, IoArgoprojEventsV1alpha1S3Artifact]] = None + mqtt: Optional[Dict[str, IoArgoprojEventsV1alpha1MQTTEventSource]] = None + nats: Optional[Dict[str, IoArgoprojEventsV1alpha1NATSEventsSource]] = None + nsq: Optional[Dict[str, IoArgoprojEventsV1alpha1NSQEventSource]] = None + pub_sub: Optional[Dict[str, IoArgoprojEventsV1alpha1PubSubEventSource]] = Field(default=None, alias="pubSub") + pulsar: Optional[Dict[str, IoArgoprojEventsV1alpha1PulsarEventSource]] = None + redis: Optional[Dict[str, IoArgoprojEventsV1alpha1RedisEventSource]] = None + redis_stream: Optional[Dict[str, IoArgoprojEventsV1alpha1RedisStreamEventSource]] = Field(default=None, alias="redisStream") + replicas: Optional[StrictInt] = None + resource: Optional[Dict[str, IoArgoprojEventsV1alpha1ResourceEventSource]] = None + service: Optional[IoArgoprojEventsV1alpha1Service] = None + sftp: Optional[Dict[str, IoArgoprojEventsV1alpha1SFTPEventSource]] = None + slack: Optional[Dict[str, IoArgoprojEventsV1alpha1SlackEventSource]] = None + sns: Optional[Dict[str, IoArgoprojEventsV1alpha1SNSEventSource]] = None + sqs: Optional[Dict[str, IoArgoprojEventsV1alpha1SQSEventSource]] = None + storage_grid: Optional[Dict[str, IoArgoprojEventsV1alpha1StorageGridEventSource]] = Field(default=None, alias="storageGrid") + stripe: Optional[Dict[str, IoArgoprojEventsV1alpha1StripeEventSource]] = None + template: Optional[IoArgoprojEventsV1alpha1Template] = None + webhook: Optional[Dict[str, IoArgoprojEventsV1alpha1WebhookEventSource]] = None + __properties: ClassVar[List[str]] = ["amqp", "azureEventsHub", "azureQueueStorage", "azureServiceBus", "bitbucket", "bitbucketserver", "calendar", "emitter", "eventBusName", "file", "generic", "gerrit", "github", "gitlab", "hdfs", "kafka", "minio", "mqtt", 
"nats", "nsq", "pubSub", "pulsar", "redis", "redisStream", "replicas", "resource", "service", "sftp", "slack", "sns", "sqs", "storageGrid", "stripe", "template", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSourceSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in amqp (dict) + _field_dict = {} + if self.amqp: + for _key in self.amqp: + if self.amqp[_key]: + _field_dict[_key] = self.amqp[_key].to_dict() + _dict['amqp'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in azure_events_hub (dict) + _field_dict = {} + if self.azure_events_hub: + for _key in self.azure_events_hub: + if self.azure_events_hub[_key]: + _field_dict[_key] = self.azure_events_hub[_key].to_dict() + _dict['azureEventsHub'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in azure_queue_storage (dict) + _field_dict = {} + if self.azure_queue_storage: + for _key in self.azure_queue_storage: + if self.azure_queue_storage[_key]: + _field_dict[_key] = self.azure_queue_storage[_key].to_dict() + _dict['azureQueueStorage'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in azure_service_bus (dict) + _field_dict = {} + if self.azure_service_bus: + for _key in self.azure_service_bus: + if self.azure_service_bus[_key]: + _field_dict[_key] = self.azure_service_bus[_key].to_dict() + _dict['azureServiceBus'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in bitbucket (dict) + _field_dict = {} + if self.bitbucket: + for _key in self.bitbucket: + if self.bitbucket[_key]: + _field_dict[_key] = self.bitbucket[_key].to_dict() + _dict['bitbucket'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in bitbucketserver (dict) + _field_dict = {} + if self.bitbucketserver: + for _key in self.bitbucketserver: + if self.bitbucketserver[_key]: + _field_dict[_key] = self.bitbucketserver[_key].to_dict() + 
_dict['bitbucketserver'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in calendar (dict) + _field_dict = {} + if self.calendar: + for _key in self.calendar: + if self.calendar[_key]: + _field_dict[_key] = self.calendar[_key].to_dict() + _dict['calendar'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in emitter (dict) + _field_dict = {} + if self.emitter: + for _key in self.emitter: + if self.emitter[_key]: + _field_dict[_key] = self.emitter[_key].to_dict() + _dict['emitter'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in file (dict) + _field_dict = {} + if self.file: + for _key in self.file: + if self.file[_key]: + _field_dict[_key] = self.file[_key].to_dict() + _dict['file'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in generic (dict) + _field_dict = {} + if self.generic: + for _key in self.generic: + if self.generic[_key]: + _field_dict[_key] = self.generic[_key].to_dict() + _dict['generic'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in gerrit (dict) + _field_dict = {} + if self.gerrit: + for _key in self.gerrit: + if self.gerrit[_key]: + _field_dict[_key] = self.gerrit[_key].to_dict() + _dict['gerrit'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in github (dict) + _field_dict = {} + if self.github: + for _key in self.github: + if self.github[_key]: + _field_dict[_key] = self.github[_key].to_dict() + _dict['github'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in gitlab (dict) + _field_dict = {} + if self.gitlab: + for _key in self.gitlab: + if self.gitlab[_key]: + _field_dict[_key] = self.gitlab[_key].to_dict() + _dict['gitlab'] = _field_dict + # override the default output from pydantic by calling 
`to_dict()` of each value in hdfs (dict) + _field_dict = {} + if self.hdfs: + for _key in self.hdfs: + if self.hdfs[_key]: + _field_dict[_key] = self.hdfs[_key].to_dict() + _dict['hdfs'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in kafka (dict) + _field_dict = {} + if self.kafka: + for _key in self.kafka: + if self.kafka[_key]: + _field_dict[_key] = self.kafka[_key].to_dict() + _dict['kafka'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in minio (dict) + _field_dict = {} + if self.minio: + for _key in self.minio: + if self.minio[_key]: + _field_dict[_key] = self.minio[_key].to_dict() + _dict['minio'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in mqtt (dict) + _field_dict = {} + if self.mqtt: + for _key in self.mqtt: + if self.mqtt[_key]: + _field_dict[_key] = self.mqtt[_key].to_dict() + _dict['mqtt'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in nats (dict) + _field_dict = {} + if self.nats: + for _key in self.nats: + if self.nats[_key]: + _field_dict[_key] = self.nats[_key].to_dict() + _dict['nats'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in nsq (dict) + _field_dict = {} + if self.nsq: + for _key in self.nsq: + if self.nsq[_key]: + _field_dict[_key] = self.nsq[_key].to_dict() + _dict['nsq'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in pub_sub (dict) + _field_dict = {} + if self.pub_sub: + for _key in self.pub_sub: + if self.pub_sub[_key]: + _field_dict[_key] = self.pub_sub[_key].to_dict() + _dict['pubSub'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in pulsar (dict) + _field_dict = {} + if self.pulsar: + for _key in self.pulsar: + if self.pulsar[_key]: + _field_dict[_key] = 
self.pulsar[_key].to_dict() + _dict['pulsar'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in redis (dict) + _field_dict = {} + if self.redis: + for _key in self.redis: + if self.redis[_key]: + _field_dict[_key] = self.redis[_key].to_dict() + _dict['redis'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in redis_stream (dict) + _field_dict = {} + if self.redis_stream: + for _key in self.redis_stream: + if self.redis_stream[_key]: + _field_dict[_key] = self.redis_stream[_key].to_dict() + _dict['redisStream'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in resource (dict) + _field_dict = {} + if self.resource: + for _key in self.resource: + if self.resource[_key]: + _field_dict[_key] = self.resource[_key].to_dict() + _dict['resource'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of service + if self.service: + _dict['service'] = self.service.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in sftp (dict) + _field_dict = {} + if self.sftp: + for _key in self.sftp: + if self.sftp[_key]: + _field_dict[_key] = self.sftp[_key].to_dict() + _dict['sftp'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in slack (dict) + _field_dict = {} + if self.slack: + for _key in self.slack: + if self.slack[_key]: + _field_dict[_key] = self.slack[_key].to_dict() + _dict['slack'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in sns (dict) + _field_dict = {} + if self.sns: + for _key in self.sns: + if self.sns[_key]: + _field_dict[_key] = self.sns[_key].to_dict() + _dict['sns'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in sqs (dict) + _field_dict = {} + if self.sqs: + for _key in self.sqs: + if 
self.sqs[_key]: + _field_dict[_key] = self.sqs[_key].to_dict() + _dict['sqs'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in storage_grid (dict) + _field_dict = {} + if self.storage_grid: + for _key in self.storage_grid: + if self.storage_grid[_key]: + _field_dict[_key] = self.storage_grid[_key].to_dict() + _dict['storageGrid'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in stripe (dict) + _field_dict = {} + if self.stripe: + for _key in self.stripe: + if self.stripe[_key]: + _field_dict[_key] = self.stripe[_key].to_dict() + _dict['stripe'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in webhook (dict) + _field_dict = {} + if self.webhook: + for _key in self.webhook: + if self.webhook[_key]: + _field_dict[_key] = self.webhook[_key].to_dict() + _dict['webhook'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSourceSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "amqp": dict( + (_k, IoArgoprojEventsV1alpha1AMQPEventSource.from_dict(_v)) + for _k, _v in obj["amqp"].items() + ) + if obj.get("amqp") is not None + else None, + "azureEventsHub": dict( + (_k, IoArgoprojEventsV1alpha1AzureEventsHubEventSource.from_dict(_v)) + for _k, _v in obj["azureEventsHub"].items() + ) + if obj.get("azureEventsHub") is not None + else None, + "azureQueueStorage": dict( + (_k, IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.from_dict(_v)) + for _k, _v in obj["azureQueueStorage"].items() + ) + if obj.get("azureQueueStorage") is not None + else None, + 
"azureServiceBus": dict( + (_k, IoArgoprojEventsV1alpha1AzureServiceBusEventSource.from_dict(_v)) + for _k, _v in obj["azureServiceBus"].items() + ) + if obj.get("azureServiceBus") is not None + else None, + "bitbucket": dict( + (_k, IoArgoprojEventsV1alpha1BitbucketEventSource.from_dict(_v)) + for _k, _v in obj["bitbucket"].items() + ) + if obj.get("bitbucket") is not None + else None, + "bitbucketserver": dict( + (_k, IoArgoprojEventsV1alpha1BitbucketServerEventSource.from_dict(_v)) + for _k, _v in obj["bitbucketserver"].items() + ) + if obj.get("bitbucketserver") is not None + else None, + "calendar": dict( + (_k, IoArgoprojEventsV1alpha1CalendarEventSource.from_dict(_v)) + for _k, _v in obj["calendar"].items() + ) + if obj.get("calendar") is not None + else None, + "emitter": dict( + (_k, IoArgoprojEventsV1alpha1EmitterEventSource.from_dict(_v)) + for _k, _v in obj["emitter"].items() + ) + if obj.get("emitter") is not None + else None, + "eventBusName": obj.get("eventBusName"), + "file": dict( + (_k, IoArgoprojEventsV1alpha1FileEventSource.from_dict(_v)) + for _k, _v in obj["file"].items() + ) + if obj.get("file") is not None + else None, + "generic": dict( + (_k, IoArgoprojEventsV1alpha1GenericEventSource.from_dict(_v)) + for _k, _v in obj["generic"].items() + ) + if obj.get("generic") is not None + else None, + "gerrit": dict( + (_k, IoArgoprojEventsV1alpha1GerritEventSource.from_dict(_v)) + for _k, _v in obj["gerrit"].items() + ) + if obj.get("gerrit") is not None + else None, + "github": dict( + (_k, IoArgoprojEventsV1alpha1GithubEventSource.from_dict(_v)) + for _k, _v in obj["github"].items() + ) + if obj.get("github") is not None + else None, + "gitlab": dict( + (_k, IoArgoprojEventsV1alpha1GitlabEventSource.from_dict(_v)) + for _k, _v in obj["gitlab"].items() + ) + if obj.get("gitlab") is not None + else None, + "hdfs": dict( + (_k, IoArgoprojEventsV1alpha1HDFSEventSource.from_dict(_v)) + for _k, _v in obj["hdfs"].items() + ) + if obj.get("hdfs") is not 
None + else None, + "kafka": dict( + (_k, IoArgoprojEventsV1alpha1KafkaEventSource.from_dict(_v)) + for _k, _v in obj["kafka"].items() + ) + if obj.get("kafka") is not None + else None, + "minio": dict( + (_k, IoArgoprojEventsV1alpha1S3Artifact.from_dict(_v)) + for _k, _v in obj["minio"].items() + ) + if obj.get("minio") is not None + else None, + "mqtt": dict( + (_k, IoArgoprojEventsV1alpha1MQTTEventSource.from_dict(_v)) + for _k, _v in obj["mqtt"].items() + ) + if obj.get("mqtt") is not None + else None, + "nats": dict( + (_k, IoArgoprojEventsV1alpha1NATSEventsSource.from_dict(_v)) + for _k, _v in obj["nats"].items() + ) + if obj.get("nats") is not None + else None, + "nsq": dict( + (_k, IoArgoprojEventsV1alpha1NSQEventSource.from_dict(_v)) + for _k, _v in obj["nsq"].items() + ) + if obj.get("nsq") is not None + else None, + "pubSub": dict( + (_k, IoArgoprojEventsV1alpha1PubSubEventSource.from_dict(_v)) + for _k, _v in obj["pubSub"].items() + ) + if obj.get("pubSub") is not None + else None, + "pulsar": dict( + (_k, IoArgoprojEventsV1alpha1PulsarEventSource.from_dict(_v)) + for _k, _v in obj["pulsar"].items() + ) + if obj.get("pulsar") is not None + else None, + "redis": dict( + (_k, IoArgoprojEventsV1alpha1RedisEventSource.from_dict(_v)) + for _k, _v in obj["redis"].items() + ) + if obj.get("redis") is not None + else None, + "redisStream": dict( + (_k, IoArgoprojEventsV1alpha1RedisStreamEventSource.from_dict(_v)) + for _k, _v in obj["redisStream"].items() + ) + if obj.get("redisStream") is not None + else None, + "replicas": obj.get("replicas"), + "resource": dict( + (_k, IoArgoprojEventsV1alpha1ResourceEventSource.from_dict(_v)) + for _k, _v in obj["resource"].items() + ) + if obj.get("resource") is not None + else None, + "service": IoArgoprojEventsV1alpha1Service.from_dict(obj["service"]) if obj.get("service") is not None else None, + "sftp": dict( + (_k, IoArgoprojEventsV1alpha1SFTPEventSource.from_dict(_v)) + for _k, _v in obj["sftp"].items() + ) + if 
obj.get("sftp") is not None + else None, + "slack": dict( + (_k, IoArgoprojEventsV1alpha1SlackEventSource.from_dict(_v)) + for _k, _v in obj["slack"].items() + ) + if obj.get("slack") is not None + else None, + "sns": dict( + (_k, IoArgoprojEventsV1alpha1SNSEventSource.from_dict(_v)) + for _k, _v in obj["sns"].items() + ) + if obj.get("sns") is not None + else None, + "sqs": dict( + (_k, IoArgoprojEventsV1alpha1SQSEventSource.from_dict(_v)) + for _k, _v in obj["sqs"].items() + ) + if obj.get("sqs") is not None + else None, + "storageGrid": dict( + (_k, IoArgoprojEventsV1alpha1StorageGridEventSource.from_dict(_v)) + for _k, _v in obj["storageGrid"].items() + ) + if obj.get("storageGrid") is not None + else None, + "stripe": dict( + (_k, IoArgoprojEventsV1alpha1StripeEventSource.from_dict(_v)) + for _k, _v in obj["stripe"].items() + ) + if obj.get("stripe") is not None + else None, + "template": IoArgoprojEventsV1alpha1Template.from_dict(obj["template"]) if obj.get("template") is not None else None, + "webhook": dict( + (_k, IoArgoprojEventsV1alpha1WebhookEventSource.from_dict(_v)) + for _k, _v in obj["webhook"].items() + ) + if obj.get("webhook") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_status.py new file mode 100644 index 000000000000..504661c7df2f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_event_source_status.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1EventSourceStatus(BaseModel): + """ + IoArgoprojEventsV1alpha1EventSourceStatus + """ # noqa: E501 + status: Optional[IoArgoprojEventsV1alpha1Status] = None + __properties: ClassVar[List[str]] = ["status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSourceStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1EventSourceStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "status": IoArgoprojEventsV1alpha1Status.from_dict(obj["status"]) if obj.get("status") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_expr_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_expr_filter.py new file mode 100644 index 000000000000..953b2ce4b6b4 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_expr_filter.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ExprFilter(BaseModel): + """ + IoArgoprojEventsV1alpha1ExprFilter + """ # noqa: E501 + expr: Optional[StrictStr] = Field(default=None, description="Expr refers to the expression that determines the outcome of the filter.") + fields: Optional[List[IoArgoprojEventsV1alpha1PayloadField]] = Field(default=None, description="Fields refers to set of keys that refer to the paths within event payload.") + __properties: ClassVar[List[str]] = ["expr", "fields"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ExprFilter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in fields (list) + _items = [] + if self.fields: + for _item in self.fields: + if _item: + _items.append(_item.to_dict()) + _dict['fields'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ExprFilter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "expr": obj.get("expr"), + "fields": [IoArgoprojEventsV1alpha1PayloadField.from_dict(_item) for _item in obj["fields"]] if obj.get("fields") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_artifact.py new file mode 100644 index 000000000000..297bb45903dd --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_artifact.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1FileArtifact(BaseModel): + """ + IoArgoprojEventsV1alpha1FileArtifact + """ # noqa: E501 + path: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["path"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1FileArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1FileArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "path": obj.get("path") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_event_source.py new file mode 100644 index 000000000000..7e7f2e4472ff --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_file_event_source.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1FileEventSource(BaseModel): + """ + FileEventSource describes an event-source for file related events. 
+ """ # noqa: E501 + event_type: Optional[StrictStr] = Field(default=None, alias="eventType") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + metadata: Optional[Dict[str, StrictStr]] = None + polling: Optional[StrictBool] = None + watch_path_config: Optional[IoArgoprojEventsV1alpha1WatchPathConfig] = Field(default=None, alias="watchPathConfig") + __properties: ClassVar[List[str]] = ["eventType", "filter", "metadata", "polling", "watchPathConfig"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1FileEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of watch_path_config + if self.watch_path_config: + _dict['watchPathConfig'] = self.watch_path_config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1FileEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "eventType": obj.get("eventType"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "metadata": obj.get("metadata"), + "polling": obj.get("polling"), + "watchPathConfig": IoArgoprojEventsV1alpha1WatchPathConfig.from_dict(obj["watchPathConfig"]) if obj.get("watchPathConfig") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_generic_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_generic_event_source.py new file mode 100644 index 000000000000..7a24fb696c4d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_generic_event_source.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GenericEventSource(BaseModel): + """ + GenericEventSource refers to a generic event source. It can be used to implement a custom event source. + """ # noqa: E501 + auth_secret: Optional[SecretKeySelector] = Field(default=None, alias="authSecret") + config: Optional[StrictStr] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + insecure: Optional[StrictBool] = Field(default=None, description="Insecure determines the type of connection.") + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + url: Optional[StrictStr] = Field(default=None, description="URL of the gRPC server that implements the event source.") + __properties: ClassVar[List[str]] = ["authSecret", "config", "filter", "insecure", "jsonBody", "metadata", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GenericEventSource 
from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth_secret + if self.auth_secret: + _dict['authSecret'] = self.auth_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GenericEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "authSecret": SecretKeySelector.from_dict(obj["authSecret"]) if obj.get("authSecret") is not None else None, + "config": obj.get("config"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "insecure": obj.get("insecure"), + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gerrit_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gerrit_event_source.py new file mode 100644 index 000000000000..83c856c02a5c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gerrit_event_source.py @@ -0,0 +1,117 @@ +# coding: 
utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GerritEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1GerritEventSource + """ # noqa: E501 + auth: Optional[IoArgoprojEventsV1alpha1BasicAuth] = None + delete_hook_on_finish: Optional[StrictBool] = Field(default=None, alias="deleteHookOnFinish") + events: Optional[List[StrictStr]] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + gerrit_base_url: Optional[StrictStr] = Field(default=None, alias="gerritBaseURL") + hook_name: Optional[StrictStr] = Field(default=None, alias="hookName") + metadata: Optional[Dict[str, StrictStr]] = None + projects: Optional[List[StrictStr]] = Field(default=None, description="List of project namespace paths like \"whynowy/test\".") + ssl_verify: Optional[StrictBool] = Field(default=None, alias="sslVerify") + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + __properties: ClassVar[List[str]] = ["auth", "deleteHookOnFinish", 
"events", "filter", "gerritBaseURL", "hookName", "metadata", "projects", "sslVerify", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GerritEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth + if self.auth: + _dict['auth'] = self.auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GerritEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "auth": IoArgoprojEventsV1alpha1BasicAuth.from_dict(obj["auth"]) if obj.get("auth") is not None else None, + "deleteHookOnFinish": obj.get("deleteHookOnFinish"), + "events": obj.get("events"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "gerritBaseURL": obj.get("gerritBaseURL"), + "hookName": obj.get("hookName"), + "metadata": obj.get("metadata"), + "projects": obj.get("projects"), + "sslVerify": obj.get("sslVerify"), + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_artifact.py new file mode 100644 index 000000000000..95b8eeaf1780 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_artifact.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow 
engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds +from argo_workflows.models.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GitArtifact(BaseModel): + """ + IoArgoprojEventsV1alpha1GitArtifact + """ # noqa: E501 + branch: Optional[StrictStr] = None + clone_directory: Optional[StrictStr] = Field(default=None, description="Directory to clone the repository. We clone complete directory because GitArtifact is not limited to any specific Git service providers. 
Hence we don't use any specific git provider client.", alias="cloneDirectory") + creds: Optional[IoArgoprojEventsV1alpha1GitCreds] = None + file_path: Optional[StrictStr] = Field(default=None, alias="filePath") + insecure_ignore_host_key: Optional[StrictBool] = Field(default=None, alias="insecureIgnoreHostKey") + ref: Optional[StrictStr] = None + remote: Optional[IoArgoprojEventsV1alpha1GitRemoteConfig] = None + ssh_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="sshKeySecret") + tag: Optional[StrictStr] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["branch", "cloneDirectory", "creds", "filePath", "insecureIgnoreHostKey", "ref", "remote", "sshKeySecret", "tag", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creds + if self.creds: + _dict['creds'] = self.creds.to_dict() + # override the default output from pydantic by calling `to_dict()` of remote + if self.remote: + _dict['remote'] = self.remote.to_dict() + # override the default output from pydantic by calling `to_dict()` of ssh_key_secret + if self.ssh_key_secret: + _dict['sshKeySecret'] = self.ssh_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "branch": obj.get("branch"), + "cloneDirectory": obj.get("cloneDirectory"), + "creds": IoArgoprojEventsV1alpha1GitCreds.from_dict(obj["creds"]) if obj.get("creds") is not None else None, + "filePath": obj.get("filePath"), + "insecureIgnoreHostKey": obj.get("insecureIgnoreHostKey"), + "ref": obj.get("ref"), + "remote": IoArgoprojEventsV1alpha1GitRemoteConfig.from_dict(obj["remote"]) if obj.get("remote") is not None else None, + "sshKeySecret": SecretKeySelector.from_dict(obj["sshKeySecret"]) if obj.get("sshKeySecret") is not None else None, + "tag": obj.get("tag"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_creds.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_creds.py new file mode 100644 index 000000000000..b5b42bcb42a0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_creds.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for 
orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GitCreds(BaseModel): + """ + IoArgoprojEventsV1alpha1GitCreds + """ # noqa: E501 + password: Optional[SecretKeySelector] = None + username: Optional[SecretKeySelector] = None + __properties: ClassVar[List[str]] = ["password", "username"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitCreds from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password + if self.password: + _dict['password'] = self.password.to_dict() + # override the default output from pydantic by calling `to_dict()` of username + if self.username: + _dict['username'] = self.username.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitCreds from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "password": SecretKeySelector.from_dict(obj["password"]) if obj.get("password") is not None else None, + "username": SecretKeySelector.from_dict(obj["username"]) if obj.get("username") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_remote_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_remote_config.py new file mode 100644 index 000000000000..3bfb6f655fe6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_git_remote_config.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GitRemoteConfig(BaseModel): + """ + IoArgoprojEventsV1alpha1GitRemoteConfig + """ # noqa: E501 + name: Optional[StrictStr] = Field(default=None, description="Name of the remote to fetch from.") + urls: Optional[List[StrictStr]] = Field(default=None, description="URLs the URLs of a remote repository. It must be non-empty. Fetch will always use the first URL, while push will use all of them.") + __properties: ClassVar[List[str]] = ["name", "urls"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitRemoteConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitRemoteConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "urls": obj.get("urls") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_app_creds.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_app_creds.py new file mode 100644 index 000000000000..3d247b75d67c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_app_creds.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GithubAppCreds(BaseModel): + """ + IoArgoprojEventsV1alpha1GithubAppCreds + """ # noqa: E501 + app_id: Optional[StrictStr] = Field(default=None, alias="appID") + installation_id: Optional[StrictStr] = Field(default=None, alias="installationID") + private_key: Optional[SecretKeySelector] = Field(default=None, alias="privateKey") + __properties: ClassVar[List[str]] = ["appID", "installationID", "privateKey"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GithubAppCreds from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of private_key + if self.private_key: + _dict['privateKey'] = self.private_key.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GithubAppCreds from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "appID": obj.get("appID"), + "installationID": obj.get("installationID"), + "privateKey": SecretKeySelector.from_dict(obj["privateKey"]) if obj.get("privateKey") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_event_source.py new file mode 100644 index 000000000000..ff2df895f4fc --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_github_event_source.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds +from argo_workflows.models.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GithubEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1GithubEventSource + """ # noqa: E501 + active: Optional[StrictBool] = None + api_token: Optional[SecretKeySelector] = Field(default=None, alias="apiToken") + content_type: Optional[StrictStr] = Field(default=None, alias="contentType") + delete_hook_on_finish: Optional[StrictBool] = Field(default=None, alias="deleteHookOnFinish") + events: Optional[List[StrictStr]] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + github_app: Optional[IoArgoprojEventsV1alpha1GithubAppCreds] = Field(default=None, alias="githubApp") + github_base_url: Optional[StrictStr] = Field(default=None, alias="githubBaseURL") + github_upload_url: Optional[StrictStr] = Field(default=None, alias="githubUploadURL") + id: Optional[StrictStr] = None + insecure: Optional[StrictBool] = None + metadata: Optional[Dict[str, StrictStr]] = None + organizations: Optional[List[StrictStr]] = Field(default=None, description="Organizations holds the names of organizations (used for organization level webhooks). 
Not required if Repositories is set.") + owner: Optional[StrictStr] = None + repositories: Optional[List[IoArgoprojEventsV1alpha1OwnedRepositories]] = Field(default=None, description="Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set.") + repository: Optional[StrictStr] = None + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + webhook_secret: Optional[SecretKeySelector] = Field(default=None, alias="webhookSecret") + __properties: ClassVar[List[str]] = ["active", "apiToken", "contentType", "deleteHookOnFinish", "events", "filter", "githubApp", "githubBaseURL", "githubUploadURL", "id", "insecure", "metadata", "organizations", "owner", "repositories", "repository", "webhook", "webhookSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GithubEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of api_token + if self.api_token: + _dict['apiToken'] = self.api_token.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of github_app + if self.github_app: + _dict['githubApp'] = self.github_app.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in repositories (list) + _items = [] + if self.repositories: + for _item in self.repositories: + if _item: + _items.append(_item.to_dict()) + _dict['repositories'] = _items + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook_secret + if self.webhook_secret: + _dict['webhookSecret'] = self.webhook_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GithubEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "active": obj.get("active"), + "apiToken": SecretKeySelector.from_dict(obj["apiToken"]) if obj.get("apiToken") is not None else None, + "contentType": obj.get("contentType"), + "deleteHookOnFinish": obj.get("deleteHookOnFinish"), + "events": obj.get("events"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "githubApp": IoArgoprojEventsV1alpha1GithubAppCreds.from_dict(obj["githubApp"]) if obj.get("githubApp") is not None else None, + 
"githubBaseURL": obj.get("githubBaseURL"), + "githubUploadURL": obj.get("githubUploadURL"), + "id": obj.get("id"), + "insecure": obj.get("insecure"), + "metadata": obj.get("metadata"), + "organizations": obj.get("organizations"), + "owner": obj.get("owner"), + "repositories": [IoArgoprojEventsV1alpha1OwnedRepositories.from_dict(_item) for _item in obj["repositories"]] if obj.get("repositories") is not None else None, + "repository": obj.get("repository"), + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None, + "webhookSecret": SecretKeySelector.from_dict(obj["webhookSecret"]) if obj.get("webhookSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gitlab_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gitlab_event_source.py new file mode 100644 index 000000000000..ed76eb1a62de --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_gitlab_event_source.py @@ -0,0 +1,124 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1GitlabEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1GitlabEventSource + """ # noqa: E501 + access_token: Optional[SecretKeySelector] = Field(default=None, alias="accessToken") + delete_hook_on_finish: Optional[StrictBool] = Field(default=None, alias="deleteHookOnFinish") + enable_ssl_verification: Optional[StrictBool] = Field(default=None, alias="enableSSLVerification") + events: Optional[List[StrictStr]] = Field(default=None, description="Events are gitlab event to listen to. 
Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794.") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + gitlab_base_url: Optional[StrictStr] = Field(default=None, alias="gitlabBaseURL") + groups: Optional[List[StrictStr]] = None + metadata: Optional[Dict[str, StrictStr]] = None + project_id: Optional[StrictStr] = Field(default=None, alias="projectID") + projects: Optional[List[StrictStr]] = None + secret_token: Optional[SecretKeySelector] = Field(default=None, alias="secretToken") + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + __properties: ClassVar[List[str]] = ["accessToken", "deleteHookOnFinish", "enableSSLVerification", "events", "filter", "gitlabBaseURL", "groups", "metadata", "projectID", "projects", "secretToken", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitlabEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_token + if self.access_token: + _dict['accessToken'] = self.access_token.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_token + if self.secret_token: + _dict['secretToken'] = self.secret_token.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1GitlabEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessToken": SecretKeySelector.from_dict(obj["accessToken"]) if obj.get("accessToken") is not None else None, + "deleteHookOnFinish": obj.get("deleteHookOnFinish"), + "enableSSLVerification": obj.get("enableSSLVerification"), + "events": obj.get("events"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "gitlabBaseURL": obj.get("gitlabBaseURL"), + "groups": obj.get("groups"), + "metadata": obj.get("metadata"), + "projectID": obj.get("projectID"), + "projects": obj.get("projects"), + "secretToken": SecretKeySelector.from_dict(obj["secretToken"]) if obj.get("secretToken") is not None else None, + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_hdfs_event_source.py 
# coding: utf-8

"""
    Argo Workflows API

    Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/

    The version of the OpenAPI document: VERSION
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
""" # noqa: E501


from __future__ import annotations
import pprint
import re  # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector
from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter
from argo_workflows.models.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig
from argo_workflows.models.secret_key_selector import SecretKeySelector
from typing import Optional, Set
from typing_extensions import Self

class IoArgoprojEventsV1alpha1HDFSEventSource(BaseModel):
    """
    IoArgoprojEventsV1alpha1HDFSEventSource

    HDFS event-source configuration: cluster addresses, the path to watch,
    and optional Kerberos credentials (ccache, keytab, or username).
    """ # noqa: E501
    addresses: Optional[List[StrictStr]] = None
    check_interval: Optional[StrictStr] = Field(default=None, alias="checkInterval")
    filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None
    hdfs_user: Optional[StrictStr] = Field(default=None, description="HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.", alias="hdfsUser")
    krb_c_cache_secret: Optional[SecretKeySelector] = Field(default=None, alias="krbCCacheSecret")
    krb_config_config_map: Optional[ConfigMapKeySelector] = Field(default=None, alias="krbConfigConfigMap")
    krb_keytab_secret: Optional[SecretKeySelector] = Field(default=None, alias="krbKeytabSecret")
    krb_realm: Optional[StrictStr] = Field(default=None, description="KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.", alias="krbRealm")
    krb_service_principal_name: Optional[StrictStr] = Field(default=None, description="KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.", alias="krbServicePrincipalName")
    krb_username: Optional[StrictStr] = Field(default=None, description="KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.", alias="krbUsername")
    metadata: Optional[Dict[str, StrictStr]] = None
    type: Optional[StrictStr] = None
    watch_path_config: Optional[IoArgoprojEventsV1alpha1WatchPathConfig] = Field(default=None, alias="watchPathConfig")
    # JSON-level property names of this model, used by the (de)serialization helpers below.
    __properties: ClassVar[List[str]] = ["addresses", "checkInterval", "filter", "hdfsUser", "krbCCacheSecret", "krbConfigConfigMap", "krbKeytabSecret", "krbRealm", "krbServicePrincipalName", "krbUsername", "metadata", "type", "watchPathConfig"]

    model_config = ConfigDict(
        populate_by_name=True,  # accept both python field names and JSON aliases on input
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1HDFSEventSource from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of filter
        if self.filter:
            _dict['filter'] = self.filter.to_dict()
        # override the default output from pydantic by calling `to_dict()` of krb_c_cache_secret
        if self.krb_c_cache_secret:
            _dict['krbCCacheSecret'] = self.krb_c_cache_secret.to_dict()
        # override the default output from pydantic by calling `to_dict()` of krb_config_config_map
        if self.krb_config_config_map:
            _dict['krbConfigConfigMap'] = self.krb_config_config_map.to_dict()
        # override the default output from pydantic by calling `to_dict()` of krb_keytab_secret
        if self.krb_keytab_secret:
            _dict['krbKeytabSecret'] = self.krb_keytab_secret.to_dict()
        # override the default output from pydantic by calling `to_dict()` of watch_path_config
        if self.watch_path_config:
            _dict['watchPathConfig'] = self.watch_path_config.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1HDFSEventSource from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "addresses": obj.get("addresses"),
            "checkInterval": obj.get("checkInterval"),
            "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None,
            "hdfsUser": obj.get("hdfsUser"),
            "krbCCacheSecret": SecretKeySelector.from_dict(obj["krbCCacheSecret"]) if obj.get("krbCCacheSecret") is not None else None,
            "krbConfigConfigMap": ConfigMapKeySelector.from_dict(obj["krbConfigConfigMap"]) if obj.get("krbConfigConfigMap") is not None else None,
            "krbKeytabSecret": SecretKeySelector.from_dict(obj["krbKeytabSecret"]) if obj.get("krbKeytabSecret") is not None else None,
            "krbRealm": obj.get("krbRealm"),
            "krbServicePrincipalName": obj.get("krbServicePrincipalName"),
            "krbUsername": obj.get("krbUsername"),
            "metadata": obj.get("metadata"),
            "type": obj.get("type"),
            "watchPathConfig": IoArgoprojEventsV1alpha1WatchPathConfig.from_dict(obj["watchPathConfig"]) if obj.get("watchPathConfig") is not None else None
        })
        return _obj
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1HTTPTrigger(BaseModel): + """ + IoArgoprojEventsV1alpha1HTTPTrigger + """ # noqa: E501 + basic_auth: Optional[IoArgoprojEventsV1alpha1BasicAuth] = Field(default=None, alias="basicAuth") + headers: Optional[Dict[str, StrictStr]] = None + method: Optional[StrictStr] = None + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource.") + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + secure_headers: Optional[List[IoArgoprojEventsV1alpha1SecureHeader]] = Field(default=None, alias="secureHeaders") + timeout: Optional[StrictStr] = None + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + url: Optional[StrictStr] = Field(default=None, description="URL refers to the URL to send HTTP request to.") + __properties: ClassVar[List[str]] = ["basicAuth", "headers", "method", "parameters", "payload", "secureHeaders", "timeout", "tls", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string 
representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1HTTPTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of basic_auth + if self.basic_auth: + _dict['basicAuth'] = self.basic_auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in secure_headers (list) + _items = [] + if self.secure_headers: + for _item in self.secure_headers: + if _item: + _items.append(_item.to_dict()) + _dict['secureHeaders'] = _items + # override the default output from pydantic by calling `to_dict()` of tls 
+ if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1HTTPTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "basicAuth": IoArgoprojEventsV1alpha1BasicAuth.from_dict(obj["basicAuth"]) if obj.get("basicAuth") is not None else None, + "headers": obj.get("headers"), + "method": obj.get("method"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "secureHeaders": [IoArgoprojEventsV1alpha1SecureHeader.from_dict(_item) for _item in obj["secureHeaders"]] if obj.get("secureHeaders") is not None else None, + "timeout": obj.get("timeout"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_int64_or_string.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_int64_or_string.py new file mode 100644 index 000000000000..009bc7bbde7c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_int64_or_string.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Int64OrString(BaseModel): + """ + IoArgoprojEventsV1alpha1Int64OrString + """ # noqa: E501 + int64_val: Optional[StrictStr] = Field(default=None, alias="int64Val") + str_val: Optional[StrictStr] = Field(default=None, alias="strVal") + type: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["int64Val", "strVal", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Int64OrString from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Int64OrString from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "int64Val": obj.get("int64Val"), + "strVal": obj.get("strVal"), + "type": obj.get("type") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_k8_s_resource_policy.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_k8_s_resource_policy.py new file mode 100644 index 000000000000..ce7787520b67 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_k8_s_resource_policy.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1K8SResourcePolicy(BaseModel): + """ + IoArgoprojEventsV1alpha1K8SResourcePolicy + """ # noqa: E501 + backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = None + error_on_backoff_timeout: Optional[StrictBool] = Field(default=None, alias="errorOnBackoffTimeout") + labels: Optional[Dict[str, StrictStr]] = None + __properties: ClassVar[List[str]] = ["backoff", "errorOnBackoffTimeout", "labels"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1K8SResourcePolicy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of backoff + if self.backoff: + _dict['backoff'] = self.backoff.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1K8SResourcePolicy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "backoff": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["backoff"]) if obj.get("backoff") is not None else None, + "errorOnBackoffTimeout": obj.get("errorOnBackoffTimeout"), + "labels": obj.get("labels") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_consumer_group.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_consumer_group.py new file mode 100644 index 000000000000..1cc17e121ee9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_consumer_group.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1KafkaConsumerGroup(BaseModel): + """ + IoArgoprojEventsV1alpha1KafkaConsumerGroup + """ # noqa: E501 + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + oldest: Optional[StrictBool] = None + rebalance_strategy: Optional[StrictStr] = Field(default=None, alias="rebalanceStrategy") + __properties: ClassVar[List[str]] = ["groupName", "oldest", "rebalanceStrategy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1KafkaConsumerGroup from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1KafkaConsumerGroup from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "groupName": obj.get("groupName"), + "oldest": obj.get("oldest"), + "rebalanceStrategy": obj.get("rebalanceStrategy") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_event_source.py new file mode 100644 index 000000000000..c65c996b9238 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_event_source.py @@ -0,0 +1,131 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup +from argo_workflows.models.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1KafkaEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1KafkaEventSource + """ # noqa: E501 + config: Optional[StrictStr] = Field(default=None, description="Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. 
consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional") + connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff") + consumer_group: Optional[IoArgoprojEventsV1alpha1KafkaConsumerGroup] = Field(default=None, alias="consumerGroup") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + limit_events_per_second: Optional[StrictStr] = Field(default=None, alias="limitEventsPerSecond") + metadata: Optional[Dict[str, StrictStr]] = None + partition: Optional[StrictStr] = None + sasl: Optional[IoArgoprojEventsV1alpha1SASLConfig] = None + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + topic: Optional[StrictStr] = None + url: Optional[StrictStr] = None + version: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["config", "connectionBackoff", "consumerGroup", "filter", "jsonBody", "limitEventsPerSecond", "metadata", "partition", "sasl", "tls", "topic", "url", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1KafkaEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of connection_backoff + if self.connection_backoff: + _dict['connectionBackoff'] = self.connection_backoff.to_dict() + # override the default output from pydantic by calling `to_dict()` of consumer_group + if self.consumer_group: + _dict['consumerGroup'] = self.consumer_group.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of sasl + if self.sasl: + _dict['sasl'] = self.sasl.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1KafkaEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": obj.get("config"), + "connectionBackoff": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["connectionBackoff"]) if obj.get("connectionBackoff") is not None else None, + "consumerGroup": IoArgoprojEventsV1alpha1KafkaConsumerGroup.from_dict(obj["consumerGroup"]) if obj.get("consumerGroup") is not None else None, + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "jsonBody": obj.get("jsonBody"), + "limitEventsPerSecond": 
obj.get("limitEventsPerSecond"), + "metadata": obj.get("metadata"), + "partition": obj.get("partition"), + "sasl": IoArgoprojEventsV1alpha1SASLConfig.from_dict(obj["sasl"]) if obj.get("sasl") is not None else None, + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "topic": obj.get("topic"), + "url": obj.get("url"), + "version": obj.get("version") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_trigger.py new file mode 100644 index 000000000000..7f1b951ee6f4 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_kafka_trigger.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1KafkaTrigger(BaseModel): + """ + KafkaTrigger refers to the specification of the Kafka trigger. + """ # noqa: E501 + compress: Optional[StrictBool] = None + flush_frequency: Optional[StrictInt] = Field(default=None, alias="flushFrequency") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Parameters is the list of parameters that is applied to resolved Kafka trigger object.") + partition: Optional[StrictInt] = None + partitioning_key: Optional[StrictStr] = Field(default=None, description="The partitioning key for the messages put on the Kafka topic. +optional.", alias="partitioningKey") + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Payload is the list of key-value extracted from an event payload to construct the request payload.") + required_acks: Optional[StrictInt] = Field(default=None, description="RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). 
+optional.", alias="requiredAcks") + sasl: Optional[IoArgoprojEventsV1alpha1SASLConfig] = None + schema_registry: Optional[IoArgoprojEventsV1alpha1SchemaRegistryConfig] = Field(default=None, alias="schemaRegistry") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + topic: Optional[StrictStr] = None + url: Optional[StrictStr] = Field(default=None, description="URL of the Kafka broker, multiple URLs separated by comma.") + version: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["compress", "flushFrequency", "parameters", "partition", "partitioningKey", "payload", "requiredAcks", "sasl", "schemaRegistry", "tls", "topic", "url", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1KafkaTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + # override the default output from pydantic by calling `to_dict()` of sasl + if self.sasl: + _dict['sasl'] = self.sasl.to_dict() + # override the default output from pydantic by calling `to_dict()` of schema_registry + if self.schema_registry: + _dict['schemaRegistry'] = self.schema_registry.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1KafkaTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "compress": obj.get("compress"), + "flushFrequency": obj.get("flushFrequency"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "partition": obj.get("partition"), + "partitioningKey": obj.get("partitioningKey"), + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "requiredAcks": obj.get("requiredAcks"), + "sasl": IoArgoprojEventsV1alpha1SASLConfig.from_dict(obj["sasl"]) if obj.get("sasl") is not None else None, + 
"schemaRegistry": IoArgoprojEventsV1alpha1SchemaRegistryConfig.from_dict(obj["schemaRegistry"]) if obj.get("schemaRegistry") is not None else None, + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "topic": obj.get("topic"), + "url": obj.get("url"), + "version": obj.get("version") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_log_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_log_trigger.py new file mode 100644 index 000000000000..a0a404143a90 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_log_trigger.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1LogTrigger(BaseModel): + """ + IoArgoprojEventsV1alpha1LogTrigger + """ # noqa: E501 + interval_seconds: Optional[StrictStr] = Field(default=None, alias="intervalSeconds") + __properties: ClassVar[List[str]] = ["intervalSeconds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1LogTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1LogTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "intervalSeconds": obj.get("intervalSeconds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_metadata.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_metadata.py new file mode 100644 index 000000000000..56e2da731ee6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_metadata.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Metadata(BaseModel): + """ + IoArgoprojEventsV1alpha1Metadata + """ # noqa: E501 + annotations: Optional[Dict[str, StrictStr]] = None + labels: Optional[Dict[str, StrictStr]] = None + __properties: ClassVar[List[str]] = ["annotations", "labels"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Metadata from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Metadata from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "annotations": obj.get("annotations"), + "labels": obj.get("labels") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_mqtt_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_mqtt_event_source.py new file mode 100644 index 000000000000..d9cb940ce41b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_mqtt_event_source.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1MQTTEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1MQTTEventSource + """ # noqa: E501 + auth: Optional[IoArgoprojEventsV1alpha1BasicAuth] = None + client_id: Optional[StrictStr] = Field(default=None, alias="clientId") + connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + topic: Optional[StrictStr] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["auth", "clientId", "connectionBackoff", "filter", "jsonBody", "metadata", "tls", "topic", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use 
.model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1MQTTEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth + if self.auth: + _dict['auth'] = self.auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of connection_backoff + if self.connection_backoff: + _dict['connectionBackoff'] = self.connection_backoff.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1MQTTEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "auth": IoArgoprojEventsV1alpha1BasicAuth.from_dict(obj["auth"]) if obj.get("auth") is not None else None, + "clientId": obj.get("clientId"), + "connectionBackoff": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["connectionBackoff"]) if obj.get("connectionBackoff") is not None else None, + 
"filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "topic": obj.get("topic"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_auth.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_auth.py new file mode 100644 index 000000000000..ce465caba65a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_auth.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1NATSAuth(BaseModel): + """ + IoArgoprojEventsV1alpha1NATSAuth + """ # noqa: E501 + basic: Optional[IoArgoprojEventsV1alpha1BasicAuth] = None + credential: Optional[SecretKeySelector] = None + nkey: Optional[SecretKeySelector] = None + token: Optional[SecretKeySelector] = None + __properties: ClassVar[List[str]] = ["basic", "credential", "nkey", "token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NATSAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of basic + if self.basic: + _dict['basic'] = self.basic.to_dict() + # override the default output from pydantic by calling `to_dict()` of credential + if self.credential: + _dict['credential'] = self.credential.to_dict() + # override the default output from pydantic by calling `to_dict()` of nkey + if self.nkey: + _dict['nkey'] = self.nkey.to_dict() + # override the default output from pydantic by calling `to_dict()` of token + if self.token: + _dict['token'] = self.token.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NATSAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "basic": IoArgoprojEventsV1alpha1BasicAuth.from_dict(obj["basic"]) if obj.get("basic") is not None else None, + "credential": SecretKeySelector.from_dict(obj["credential"]) if obj.get("credential") is not None else None, + "nkey": SecretKeySelector.from_dict(obj["nkey"]) if obj.get("nkey") is not None else None, + "token": SecretKeySelector.from_dict(obj["token"]) if obj.get("token") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_events_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_events_source.py new file mode 100644 index 000000000000..05a989c8bcad --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_events_source.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1NATSEventsSource(BaseModel): + """ + IoArgoprojEventsV1alpha1NATSEventsSource + """ # noqa: E501 + auth: Optional[IoArgoprojEventsV1alpha1NATSAuth] = None + connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + subject: Optional[StrictStr] = None + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["auth", "connectionBackoff", "filter", "jsonBody", "metadata", "subject", "tls", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return 
pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NATSEventsSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth + if self.auth: + _dict['auth'] = self.auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of connection_backoff + if self.connection_backoff: + _dict['connectionBackoff'] = self.connection_backoff.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NATSEventsSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "auth": IoArgoprojEventsV1alpha1NATSAuth.from_dict(obj["auth"]) if obj.get("auth") is not None else None, + 
"connectionBackoff": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["connectionBackoff"]) if obj.get("connectionBackoff") is not None else None, + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "subject": obj.get("subject"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_trigger.py new file mode 100644 index 000000000000..76f29aba5ac2 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nats_trigger.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1NATSTrigger(BaseModel): + """ + NATSTrigger refers to the specification of the NATS trigger. 
+ """ # noqa: E501 + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + subject: Optional[StrictStr] = Field(default=None, description="Name of the subject to put message on.") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + url: Optional[StrictStr] = Field(default=None, description="URL of the NATS cluster.") + __properties: ClassVar[List[str]] = ["parameters", "payload", "subject", "tls", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NATSTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NATSTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "subject": obj.get("subject"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nsq_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nsq_event_source.py new file mode 100644 index 000000000000..0e11d9ddf43a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_nsq_event_source.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an 
open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1NSQEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1NSQEventSource + """ # noqa: E501 + channel: Optional[StrictStr] = None + connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + host_address: Optional[StrictStr] = Field(default=None, alias="hostAddress") + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + topic: Optional[StrictStr] = Field(default=None, description="Topic to subscribe to.") + __properties: ClassVar[List[str]] = ["channel", "connectionBackoff", "filter", "hostAddress", "jsonBody", "metadata", "tls", "topic"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the 
model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NSQEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of connection_backoff + if self.connection_backoff: + _dict['connectionBackoff'] = self.connection_backoff.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1NSQEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "channel": obj.get("channel"), + "connectionBackoff": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["connectionBackoff"]) if obj.get("connectionBackoff") is not None else None, + "filter": 
IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "hostAddress": obj.get("hostAddress"), + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "topic": obj.get("topic") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_open_whisk_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_open_whisk_trigger.py new file mode 100644 index 000000000000..ef530668db6a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_open_whisk_trigger.py @@ -0,0 +1,118 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1OpenWhiskTrigger(BaseModel): + """ + OpenWhiskTrigger refers to the specification of the OpenWhisk trigger. 
+ """ # noqa: E501 + action_name: Optional[StrictStr] = Field(default=None, description="Name of the action/function.", alias="actionName") + auth_token: Optional[SecretKeySelector] = Field(default=None, alias="authToken") + host: Optional[StrictStr] = Field(default=None, description="Host URL of the OpenWhisk.") + namespace: Optional[StrictStr] = Field(default=None, description="Namespace for the action. Defaults to \"_\". +optional.") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Payload is the list of key-value extracted from an event payload to construct the request payload.") + version: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["actionName", "authToken", "host", "namespace", "parameters", "payload", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1OpenWhiskTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth_token + if self.auth_token: + _dict['authToken'] = self.auth_token.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1OpenWhiskTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "actionName": obj.get("actionName"), + "authToken": SecretKeySelector.from_dict(obj["authToken"]) if obj.get("authToken") is not None else None, + "host": obj.get("host"), + "namespace": obj.get("namespace"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "version": obj.get("version") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_owned_repositories.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_owned_repositories.py new file mode 100644 index 000000000000..a2a5f8be3a7d --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_owned_repositories.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1OwnedRepositories(BaseModel): + """ + IoArgoprojEventsV1alpha1OwnedRepositories + """ # noqa: E501 + names: Optional[List[StrictStr]] = None + owner: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["names", "owner"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1OwnedRepositories from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1OwnedRepositories from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "names": obj.get("names"), + "owner": obj.get("owner") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_payload_field.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_payload_field.py new file mode 100644 index 000000000000..4069dba4b07f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_payload_field.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1PayloadField(BaseModel): + """ + PayloadField binds a value at path within the event payload against a name. 
+ """ # noqa: E501 + name: Optional[StrictStr] = Field(default=None, description="Name acts as key that holds the value at the path.") + path: Optional[StrictStr] = Field(default=None, description="Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.") + __properties: ClassVar[List[str]] = ["name", "path"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PayloadField from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PayloadField from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "path": obj.get("path") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pub_sub_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pub_sub_event_source.py new file mode 100644 index 000000000000..541acfdcff6c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pub_sub_event_source.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1PubSubEventSource(BaseModel): + """ + PubSubEventSource refers to event-source for GCP PubSub related events. 
+ """ # noqa: E501 + credential_secret: Optional[SecretKeySelector] = Field(default=None, alias="credentialSecret") + delete_subscription_on_finish: Optional[StrictBool] = Field(default=None, alias="deleteSubscriptionOnFinish") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + project_id: Optional[StrictStr] = Field(default=None, alias="projectID") + subscription_id: Optional[StrictStr] = Field(default=None, alias="subscriptionID") + topic: Optional[StrictStr] = None + topic_project_id: Optional[StrictStr] = Field(default=None, alias="topicProjectID") + __properties: ClassVar[List[str]] = ["credentialSecret", "deleteSubscriptionOnFinish", "filter", "jsonBody", "metadata", "projectID", "subscriptionID", "topic", "topicProjectID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PubSubEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of credential_secret + if self.credential_secret: + _dict['credentialSecret'] = self.credential_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PubSubEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "credentialSecret": SecretKeySelector.from_dict(obj["credentialSecret"]) if obj.get("credentialSecret") is not None else None, + "deleteSubscriptionOnFinish": obj.get("deleteSubscriptionOnFinish"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "projectID": obj.get("projectID"), + "subscriptionID": obj.get("subscriptionID"), + "topic": obj.get("topic"), + "topicProjectID": obj.get("topicProjectID") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_event_source.py new file mode 100644 index 000000000000..6525a63f03b0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_event_source.py @@ -0,0 +1,135 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1PulsarEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1PulsarEventSource + """ # noqa: E501 + auth_athenz_params: Optional[Dict[str, StrictStr]] = Field(default=None, alias="authAthenzParams") + auth_athenz_secret: Optional[SecretKeySelector] = Field(default=None, alias="authAthenzSecret") + auth_token_secret: Optional[SecretKeySelector] = Field(default=None, alias="authTokenSecret") + connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + tls_allow_insecure_connection: Optional[StrictBool] = Field(default=None, alias="tlsAllowInsecureConnection") + tls_trust_certs_secret: Optional[SecretKeySelector] = Field(default=None, alias="tlsTrustCertsSecret") + tls_validate_hostname: 
Optional[StrictBool] = Field(default=None, alias="tlsValidateHostname") + topics: Optional[List[StrictStr]] = None + type: Optional[StrictStr] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["authAthenzParams", "authAthenzSecret", "authTokenSecret", "connectionBackoff", "filter", "jsonBody", "metadata", "tls", "tlsAllowInsecureConnection", "tlsTrustCertsSecret", "tlsValidateHostname", "topics", "type", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PulsarEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth_athenz_secret + if self.auth_athenz_secret: + _dict['authAthenzSecret'] = self.auth_athenz_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of auth_token_secret + if self.auth_token_secret: + _dict['authTokenSecret'] = self.auth_token_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of connection_backoff + if self.connection_backoff: + _dict['connectionBackoff'] = self.connection_backoff.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls_trust_certs_secret + if self.tls_trust_certs_secret: + _dict['tlsTrustCertsSecret'] = self.tls_trust_certs_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PulsarEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "authAthenzParams": obj.get("authAthenzParams"), + "authAthenzSecret": SecretKeySelector.from_dict(obj["authAthenzSecret"]) if obj.get("authAthenzSecret") is not None else None, + "authTokenSecret": SecretKeySelector.from_dict(obj["authTokenSecret"]) if obj.get("authTokenSecret") is not None else None, + "connectionBackoff": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["connectionBackoff"]) if obj.get("connectionBackoff") is not None else None, + "filter": 
IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "tlsAllowInsecureConnection": obj.get("tlsAllowInsecureConnection"), + "tlsTrustCertsSecret": SecretKeySelector.from_dict(obj["tlsTrustCertsSecret"]) if obj.get("tlsTrustCertsSecret") is not None else None, + "tlsValidateHostname": obj.get("tlsValidateHostname"), + "topics": obj.get("topics"), + "type": obj.get("type"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_trigger.py new file mode 100644 index 000000000000..f52cfa962f7b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_pulsar_trigger.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1PulsarTrigger(BaseModel): + """ + PulsarTrigger refers to the specification of the Pulsar trigger. + """ # noqa: E501 + auth_athenz_params: Optional[Dict[str, StrictStr]] = Field(default=None, alias="authAthenzParams") + auth_athenz_secret: Optional[SecretKeySelector] = Field(default=None, alias="authAthenzSecret") + auth_token_secret: Optional[SecretKeySelector] = Field(default=None, alias="authTokenSecret") + connection_backoff: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="connectionBackoff") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Parameters is the list of parameters that is applied to resolved Kafka trigger object.") + payload: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Payload is the list of key-value extracted from an event payload to construct the request payload.") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + tls_allow_insecure_connection: Optional[StrictBool] = Field(default=None, alias="tlsAllowInsecureConnection") + tls_trust_certs_secret: Optional[SecretKeySelector] = Field(default=None, alias="tlsTrustCertsSecret") + tls_validate_hostname: Optional[StrictBool] = 
Field(default=None, alias="tlsValidateHostname") + topic: Optional[StrictStr] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["authAthenzParams", "authAthenzSecret", "authTokenSecret", "connectionBackoff", "parameters", "payload", "tls", "tlsAllowInsecureConnection", "tlsTrustCertsSecret", "tlsValidateHostname", "topic", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PulsarTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth_athenz_secret + if self.auth_athenz_secret: + _dict['authAthenzSecret'] = self.auth_athenz_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of auth_token_secret + if self.auth_token_secret: + _dict['authTokenSecret'] = self.auth_token_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of connection_backoff + if self.connection_backoff: + _dict['connectionBackoff'] = self.connection_backoff.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in payload (list) + _items = [] + if self.payload: + for _item in self.payload: + if _item: + _items.append(_item.to_dict()) + _dict['payload'] = _items + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls_trust_certs_secret + if self.tls_trust_certs_secret: + _dict['tlsTrustCertsSecret'] = self.tls_trust_certs_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1PulsarTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "authAthenzParams": obj.get("authAthenzParams"), + "authAthenzSecret": SecretKeySelector.from_dict(obj["authAthenzSecret"]) if obj.get("authAthenzSecret") is not None else None, 
+ "authTokenSecret": SecretKeySelector.from_dict(obj["authTokenSecret"]) if obj.get("authTokenSecret") is not None else None, + "connectionBackoff": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["connectionBackoff"]) if obj.get("connectionBackoff") is not None else None, + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "payload": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["payload"]] if obj.get("payload") is not None else None, + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "tlsAllowInsecureConnection": obj.get("tlsAllowInsecureConnection"), + "tlsTrustCertsSecret": SecretKeySelector.from_dict(obj["tlsTrustCertsSecret"]) if obj.get("tlsTrustCertsSecret") is not None else None, + "tlsValidateHostname": obj.get("tlsValidateHostname"), + "topic": obj.get("topic"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_rate_limit.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_rate_limit.py new file mode 100644 index 000000000000..a7029df6bc00 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_rate_limit.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1RateLimit(BaseModel): + """ + IoArgoprojEventsV1alpha1RateLimit + """ # noqa: E501 + requests_per_unit: Optional[StrictInt] = Field(default=None, alias="requestsPerUnit") + unit: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["requestsPerUnit", "unit"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1RateLimit from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1RateLimit from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "requestsPerUnit": obj.get("requestsPerUnit"), + "unit": obj.get("unit") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_event_source.py new file mode 100644 index 000000000000..d7fb7215fa01 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_event_source.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1RedisEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1RedisEventSource + """ # noqa: E501 + channels: Optional[List[StrictStr]] = None + db: Optional[StrictInt] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + host_address: Optional[StrictStr] = Field(default=None, alias="hostAddress") + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + namespace: Optional[StrictStr] = None + password: Optional[SecretKeySelector] = None + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + username: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["channels", "db", "filter", "hostAddress", "jsonBody", "metadata", "namespace", "password", "tls", "username"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> 
Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1RedisEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of password + if self.password: + _dict['password'] = self.password.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1RedisEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "channels": obj.get("channels"), + "db": obj.get("db"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "hostAddress": obj.get("hostAddress"), + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "namespace": obj.get("namespace"), + "password": SecretKeySelector.from_dict(obj["password"]) if obj.get("password") is not None else None, + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "username": obj.get("username") + }) + return _obj + + diff --git 
a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_stream_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_stream_event_source.py new file mode 100644 index 000000000000..3e5f4ce7b6b0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_redis_stream_event_source.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1RedisStreamEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1RedisStreamEventSource + """ # noqa: E501 + consumer_group: Optional[StrictStr] = Field(default=None, alias="consumerGroup") + db: Optional[StrictInt] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + host_address: Optional[StrictStr] = Field(default=None, alias="hostAddress") + max_msg_count_per_read: Optional[StrictInt] = Field(default=None, alias="maxMsgCountPerRead") + metadata: Optional[Dict[str, StrictStr]] = None + password: Optional[SecretKeySelector] = 
None + streams: Optional[List[StrictStr]] = Field(default=None, description="Streams to look for entries. XREADGROUP is used on all streams using a single consumer group.") + tls: Optional[IoArgoprojEventsV1alpha1TLSConfig] = None + username: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["consumerGroup", "db", "filter", "hostAddress", "maxMsgCountPerRead", "metadata", "password", "streams", "tls", "username"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1RedisStreamEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of password + if self.password: + _dict['password'] = self.password.to_dict() + # override the default output from pydantic by calling `to_dict()` of tls + if self.tls: + _dict['tls'] = self.tls.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1RedisStreamEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "consumerGroup": obj.get("consumerGroup"), + "db": obj.get("db"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "hostAddress": obj.get("hostAddress"), + "maxMsgCountPerRead": obj.get("maxMsgCountPerRead"), + "metadata": obj.get("metadata"), + "password": SecretKeySelector.from_dict(obj["password"]) if obj.get("password") is not None else None, + "streams": obj.get("streams"), + "tls": IoArgoprojEventsV1alpha1TLSConfig.from_dict(obj["tls"]) if obj.get("tls") is not None else None, + "username": obj.get("username") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource.py new file mode 100644 index 000000000000..66c423b72c51 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs 
on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Resource(BaseModel): + """ + Resource represent arbitrary structured data. + """ # noqa: E501 + value: Optional[Union[Annotated[bytes, Field(strict=True)], Annotated[str, Field(strict=True)]]] = None + __properties: ClassVar[List[str]] = ["value"] + + @field_validator('value') + def value_validate_regular_expression(cls, value): + """Validates the regular expression""" + if value is None: + return value + + if not re.match(r"^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$", value): + raise ValueError(r"must validate the regular expression /^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Resource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> 
Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Resource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_event_source.py new file mode 100644 index 000000000000..5e2bef70ddcf --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_event_source.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.group_version_resource import GroupVersionResource +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ResourceEventSource(BaseModel): + """ + ResourceEventSource refers to a event-source for K8s resource related events. + """ # noqa: E501 + event_types: Optional[List[StrictStr]] = Field(default=None, description="EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE.", alias="eventTypes") + filter: Optional[IoArgoprojEventsV1alpha1ResourceFilter] = None + group_version_resource: Optional[GroupVersionResource] = Field(default=None, alias="groupVersionResource") + metadata: Optional[Dict[str, StrictStr]] = None + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["eventTypes", "filter", "groupVersionResource", "metadata", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ResourceEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary 
representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of group_version_resource + if self.group_version_resource: + _dict['groupVersionResource'] = self.group_version_resource.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ResourceEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "eventTypes": obj.get("eventTypes"), + "filter": IoArgoprojEventsV1alpha1ResourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "groupVersionResource": GroupVersionResource.from_dict(obj["groupVersionResource"]) if obj.get("groupVersionResource") is not None else None, + "metadata": obj.get("metadata"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_filter.py new file mode 100644 index 000000000000..0d8d78cd869d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_resource_filter.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating 
parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ResourceFilter(BaseModel): + """ + IoArgoprojEventsV1alpha1ResourceFilter + """ # noqa: E501 + after_start: Optional[StrictBool] = Field(default=None, alias="afterStart") + created_by: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. 
Wrappers are provided for many of the factory methods that the time package offers.", alias="createdBy") + fields: Optional[List[IoArgoprojEventsV1alpha1Selector]] = None + labels: Optional[List[IoArgoprojEventsV1alpha1Selector]] = None + prefix: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["afterStart", "createdBy", "fields", "labels", "prefix"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ResourceFilter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in fields (list) + _items = [] + if self.fields: + for _item in self.fields: + if _item: + _items.append(_item.to_dict()) + _dict['fields'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in labels (list) + _items = [] + if self.labels: + for _item in self.labels: + if _item: + _items.append(_item.to_dict()) + _dict['labels'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ResourceFilter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "afterStart": obj.get("afterStart"), + "createdBy": obj.get("createdBy"), + "fields": [IoArgoprojEventsV1alpha1Selector.from_dict(_item) for _item in obj["fields"]] if obj.get("fields") is not None else None, + "labels": [IoArgoprojEventsV1alpha1Selector.from_dict(_item) for _item in obj["labels"]] if obj.get("labels") is not None else None, + "prefix": obj.get("prefix") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_artifact.py new file mode 100644 index 000000000000..9402a522fd2f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_artifact.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1S3Artifact(BaseModel): + """ + IoArgoprojEventsV1alpha1S3Artifact + """ # noqa: E501 + access_key: Optional[SecretKeySelector] = Field(default=None, alias="accessKey") + bucket: Optional[IoArgoprojEventsV1alpha1S3Bucket] = None + ca_certificate: Optional[SecretKeySelector] = Field(default=None, alias="caCertificate") + endpoint: Optional[StrictStr] = None + events: Optional[List[StrictStr]] = None + filter: Optional[IoArgoprojEventsV1alpha1S3Filter] = None + insecure: Optional[StrictBool] = None + metadata: Optional[Dict[str, StrictStr]] = None + region: Optional[StrictStr] = None + secret_key: Optional[SecretKeySelector] = Field(default=None, alias="secretKey") + __properties: ClassVar[List[str]] = ["accessKey", "bucket", "caCertificate", "endpoint", "events", "filter", "insecure", "metadata", "region", "secretKey"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + 
"""Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1S3Artifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_key + if self.access_key: + _dict['accessKey'] = self.access_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of bucket + if self.bucket: + _dict['bucket'] = self.bucket.to_dict() + # override the default output from pydantic by calling `to_dict()` of ca_certificate + if self.ca_certificate: + _dict['caCertificate'] = self.ca_certificate.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_key + if self.secret_key: + _dict['secretKey'] = self.secret_key.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1S3Artifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessKey": 
SecretKeySelector.from_dict(obj["accessKey"]) if obj.get("accessKey") is not None else None, + "bucket": IoArgoprojEventsV1alpha1S3Bucket.from_dict(obj["bucket"]) if obj.get("bucket") is not None else None, + "caCertificate": SecretKeySelector.from_dict(obj["caCertificate"]) if obj.get("caCertificate") is not None else None, + "endpoint": obj.get("endpoint"), + "events": obj.get("events"), + "filter": IoArgoprojEventsV1alpha1S3Filter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "insecure": obj.get("insecure"), + "metadata": obj.get("metadata"), + "region": obj.get("region"), + "secretKey": SecretKeySelector.from_dict(obj["secretKey"]) if obj.get("secretKey") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_bucket.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_bucket.py new file mode 100644 index 000000000000..9968211c9fe0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_bucket.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1S3Bucket(BaseModel): + """ + IoArgoprojEventsV1alpha1S3Bucket + """ # noqa: E501 + key: Optional[StrictStr] = None + name: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["key", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1S3Bucket from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1S3Bucket from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "name": obj.get("name") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_filter.py new file mode 100644 index 000000000000..25e37895e701 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_s3_filter.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1S3Filter(BaseModel): + """ + IoArgoprojEventsV1alpha1S3Filter + """ # noqa: E501 + prefix: Optional[StrictStr] = None + suffix: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["prefix", "suffix"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1S3Filter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1S3Filter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "prefix": obj.get("prefix"), + "suffix": obj.get("suffix") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sasl_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sasl_config.py new file mode 100644 index 000000000000..0973c63724a2 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sasl_config.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SASLConfig(BaseModel): + """ + IoArgoprojEventsV1alpha1SASLConfig + """ # noqa: E501 + mechanism: Optional[StrictStr] = None + password_secret: Optional[SecretKeySelector] = Field(default=None, alias="passwordSecret") + user_secret: Optional[SecretKeySelector] = Field(default=None, alias="userSecret") + __properties: ClassVar[List[str]] = ["mechanism", "passwordSecret", "userSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SASLConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password_secret + if self.password_secret: + _dict['passwordSecret'] = self.password_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of user_secret + if self.user_secret: + _dict['userSecret'] = self.user_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SASLConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "mechanism": obj.get("mechanism"), + "passwordSecret": SecretKeySelector.from_dict(obj["passwordSecret"]) if obj.get("passwordSecret") is not None else None, + "userSecret": SecretKeySelector.from_dict(obj["userSecret"]) if obj.get("userSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_schema_registry_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_schema_registry_config.py new file mode 100644 index 000000000000..a37d95f30382 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_schema_registry_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SchemaRegistryConfig(BaseModel): + """ + IoArgoprojEventsV1alpha1SchemaRegistryConfig + """ # noqa: E501 + auth: Optional[IoArgoprojEventsV1alpha1BasicAuth] = None + schema_id: Optional[StrictInt] = Field(default=None, alias="schemaId") + url: Optional[StrictStr] = Field(default=None, description="Schema Registry URL.") + __properties: ClassVar[List[str]] = ["auth", "schemaId", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SchemaRegistryConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth + if self.auth: + _dict['auth'] = self.auth.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SchemaRegistryConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "auth": IoArgoprojEventsV1alpha1BasicAuth.from_dict(obj["auth"]) if obj.get("auth") is not None else None, + "schemaId": obj.get("schemaId"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_secure_header.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_secure_header.py new file mode 100644 index 000000000000..432cf131782a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_secure_header.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SecureHeader(BaseModel): + """ + IoArgoprojEventsV1alpha1SecureHeader + """ # noqa: E501 + name: Optional[StrictStr] = None + value_from: Optional[IoArgoprojEventsV1alpha1ValueFromSource] = Field(default=None, alias="valueFrom") + __properties: ClassVar[List[str]] = ["name", "valueFrom"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SecureHeader from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of value_from + if self.value_from: + _dict['valueFrom'] = self.value_from.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SecureHeader from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "valueFrom": IoArgoprojEventsV1alpha1ValueFromSource.from_dict(obj["valueFrom"]) if obj.get("valueFrom") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_selector.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_selector.py new file mode 100644 index 000000000000..aa3210cd1d54 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_selector.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Selector(BaseModel): + """ + Selector represents conditional operation to select K8s objects. 
+ """ # noqa: E501 + key: Optional[StrictStr] = None + operation: Optional[StrictStr] = None + value: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["key", "operation", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Selector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Selector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "operation": obj.get("operation"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor.py new file mode 100644 index 000000000000..571760e567ca --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Sensor(BaseModel): + """ + IoArgoprojEventsV1alpha1Sensor + """ # noqa: E501 + metadata: Optional[ObjectMeta] = None + spec: Optional[IoArgoprojEventsV1alpha1SensorSpec] = None + status: Optional[IoArgoprojEventsV1alpha1SensorStatus] = None + __properties: ClassVar[List[str]] = ["metadata", "spec", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Sensor from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Sensor from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": IoArgoprojEventsV1alpha1SensorSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None, + "status": IoArgoprojEventsV1alpha1SensorStatus.from_dict(obj["status"]) if obj.get("status") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_list.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_list.py new file mode 100644 index 000000000000..c9a024a17a9b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_list.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.models.list_meta import ListMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SensorList(BaseModel): + """ + IoArgoprojEventsV1alpha1SensorList + """ # noqa: E501 + items: Optional[List[IoArgoprojEventsV1alpha1Sensor]] = None + metadata: Optional[ListMeta] = None + __properties: ClassVar[List[str]] = ["items", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SensorList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SensorList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "items": [IoArgoprojEventsV1alpha1Sensor.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "metadata": ListMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_spec.py new file mode 100644 index 000000000000..bd1bb0e7ae54 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_spec.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency +from argo_workflows.models.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SensorSpec(BaseModel): + """ + IoArgoprojEventsV1alpha1SensorSpec + """ # noqa: E501 + dependencies: Optional[List[IoArgoprojEventsV1alpha1EventDependency]] = Field(default=None, description="Dependencies is a list of the events that this sensor is dependent on.") + error_on_failed_round: Optional[StrictBool] = Field(default=None, description="ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed.", alias="errorOnFailedRound") + event_bus_name: Optional[StrictStr] = Field(default=None, alias="eventBusName") + logging_fields: Optional[Dict[str, StrictStr]] = Field(default=None, alias="loggingFields") + replicas: Optional[StrictInt] = None + revision_history_limit: Optional[StrictInt] = Field(default=None, alias="revisionHistoryLimit") + template: Optional[IoArgoprojEventsV1alpha1Template] = None + triggers: Optional[List[IoArgoprojEventsV1alpha1Trigger]] = Field(default=None, description="Triggers is a list of the things that this sensor evokes. 
These are the outputs from this sensor.") + __properties: ClassVar[List[str]] = ["dependencies", "errorOnFailedRound", "eventBusName", "loggingFields", "replicas", "revisionHistoryLimit", "template", "triggers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SensorSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in dependencies (list) + _items = [] + if self.dependencies: + for _item in self.dependencies: + if _item: + _items.append(_item.to_dict()) + _dict['dependencies'] = _items + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in triggers (list) + _items = [] + if self.triggers: + for _item in self.triggers: + if _item: + _items.append(_item.to_dict()) + _dict['triggers'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SensorSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "dependencies": [IoArgoprojEventsV1alpha1EventDependency.from_dict(_item) for _item in obj["dependencies"]] if obj.get("dependencies") is not None else None, + "errorOnFailedRound": obj.get("errorOnFailedRound"), + "eventBusName": obj.get("eventBusName"), + "loggingFields": obj.get("loggingFields"), + "replicas": obj.get("replicas"), + "revisionHistoryLimit": obj.get("revisionHistoryLimit"), + "template": IoArgoprojEventsV1alpha1Template.from_dict(obj["template"]) if obj.get("template") is not None else None, + "triggers": [IoArgoprojEventsV1alpha1Trigger.from_dict(_item) for _item in obj["triggers"]] if obj.get("triggers") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sensor_status.py new file mode 100644 index 
class IoArgoprojEventsV1alpha1SensorStatus(BaseModel):
    """SensorStatus contains information about the status of a sensor."""  # noqa: E501

    status: Optional[IoArgoprojEventsV1alpha1Status] = None
    __properties: ClassVar[List[str]] = ["status"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using alias field names."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using alias field names."""
        # Routed through to_dict() so nested models serialize via their own
        # to_dict() overrides rather than pydantic's default dump.
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1SensorStatus from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict representation of the model.

        Differs from a plain ``model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted, and nested models are dumped
        via their own ``to_dict()``.
        """
        excluded_fields: Set[str] = set()

        data = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        if self.status:
            data['status'] = self.status.to_dict()
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1SensorStatus from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "status": IoArgoprojEventsV1alpha1Status.from_dict(obj["status"]) if obj.get("status") is not None else None
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.service_port import ServicePort +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Service(BaseModel): + """ + IoArgoprojEventsV1alpha1Service + """ # noqa: E501 + cluster_ip: Optional[StrictStr] = Field(default=None, alias="clusterIP") + ports: Optional[List[ServicePort]] = None + __properties: ClassVar[List[str]] = ["clusterIP", "ports"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Service from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in ports (list) + _items = [] + if self.ports: + for _item in self.ports: + if _item: + _items.append(_item.to_dict()) + _dict['ports'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Service from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "clusterIP": obj.get("clusterIP"), + "ports": [ServicePort.from_dict(_item) for _item in obj["ports"]] if obj.get("ports") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sftp_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sftp_event_source.py new file mode 100644 index 000000000000..fddadcd3d02b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sftp_event_source.py @@ -0,0 +1,124 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SFTPEventSource(BaseModel): + """ + SFTPEventSource describes an event-source for sftp related events. + """ # noqa: E501 + address: Optional[SecretKeySelector] = None + event_type: Optional[StrictStr] = Field(default=None, alias="eventType") + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + metadata: Optional[Dict[str, StrictStr]] = None + password: Optional[SecretKeySelector] = None + poll_interval_duration: Optional[StrictStr] = Field(default=None, alias="pollIntervalDuration") + ssh_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="sshKeySecret") + username: Optional[SecretKeySelector] = None + watch_path_config: Optional[IoArgoprojEventsV1alpha1WatchPathConfig] = Field(default=None, alias="watchPathConfig") + __properties: ClassVar[List[str]] = ["address", "eventType", "filter", "metadata", "password", "pollIntervalDuration", "sshKeySecret", "username", "watchPathConfig"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use 
.model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SFTPEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of address + if self.address: + _dict['address'] = self.address.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of password + if self.password: + _dict['password'] = self.password.to_dict() + # override the default output from pydantic by calling `to_dict()` of ssh_key_secret + if self.ssh_key_secret: + _dict['sshKeySecret'] = self.ssh_key_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of username + if self.username: + _dict['username'] = self.username.to_dict() + # override the default output from pydantic by calling `to_dict()` of watch_path_config + if self.watch_path_config: + _dict['watchPathConfig'] = self.watch_path_config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SFTPEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return 
cls.model_validate(obj) + + _obj = cls.model_validate({ + "address": SecretKeySelector.from_dict(obj["address"]) if obj.get("address") is not None else None, + "eventType": obj.get("eventType"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "metadata": obj.get("metadata"), + "password": SecretKeySelector.from_dict(obj["password"]) if obj.get("password") is not None else None, + "pollIntervalDuration": obj.get("pollIntervalDuration"), + "sshKeySecret": SecretKeySelector.from_dict(obj["sshKeySecret"]) if obj.get("sshKeySecret") is not None else None, + "username": SecretKeySelector.from_dict(obj["username"]) if obj.get("username") is not None else None, + "watchPathConfig": IoArgoprojEventsV1alpha1WatchPathConfig.from_dict(obj["watchPathConfig"]) if obj.get("watchPathConfig") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_event_source.py new file mode 100644 index 000000000000..793986767e4f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_event_source.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SlackEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1SlackEventSource + """ # noqa: E501 + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + metadata: Optional[Dict[str, StrictStr]] = None + signing_secret: Optional[SecretKeySelector] = Field(default=None, alias="signingSecret") + token: Optional[SecretKeySelector] = None + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + __properties: ClassVar[List[str]] = ["filter", "metadata", "signingSecret", "token", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SlackEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of signing_secret + if self.signing_secret: + _dict['signingSecret'] = self.signing_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of token + if self.token: + _dict['token'] = self.token.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SlackEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "metadata": obj.get("metadata"), + "signingSecret": SecretKeySelector.from_dict(obj["signingSecret"]) if obj.get("signingSecret") is not None else None, + "token": SecretKeySelector.from_dict(obj["token"]) if obj.get("token") is not None else None, + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_sender.py 
class IoArgoprojEventsV1alpha1SlackSender(BaseModel):
    """Display identity (icon and username) used when posting to Slack."""  # noqa: E501

    icon: Optional[StrictStr] = None
    username: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["icon", "username"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using alias field names."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using alias field names."""
        # Routed through to_dict() for consistency with the other models.
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1SlackSender from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict representation of the model.

        Differs from a plain ``model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted.
        """
        excluded_fields: Set[str] = set()

        return self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojEventsV1alpha1SlackSender from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "icon": obj.get("icon"),
            "username": obj.get("username")
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SlackThread(BaseModel): + """ + IoArgoprojEventsV1alpha1SlackThread + """ # noqa: E501 + broadcast_message_to_channel: Optional[StrictBool] = Field(default=None, alias="broadcastMessageToChannel") + message_aggregation_key: Optional[StrictStr] = Field(default=None, alias="messageAggregationKey") + __properties: ClassVar[List[str]] = ["broadcastMessageToChannel", "messageAggregationKey"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SlackThread from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SlackThread from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "broadcastMessageToChannel": obj.get("broadcastMessageToChannel"), + "messageAggregationKey": obj.get("messageAggregationKey") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_trigger.py new file mode 100644 index 000000000000..6a0da35d416f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_slack_trigger.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SlackTrigger(BaseModel): + """ + SlackTrigger refers to the specification of the slack notification trigger. + """ # noqa: E501 + attachments: Optional[StrictStr] = None + blocks: Optional[StrictStr] = None + channel: Optional[StrictStr] = None + message: Optional[StrictStr] = None + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + sender: Optional[IoArgoprojEventsV1alpha1SlackSender] = None + slack_token: Optional[SecretKeySelector] = Field(default=None, alias="slackToken") + thread: Optional[IoArgoprojEventsV1alpha1SlackThread] = None + __properties: ClassVar[List[str]] = ["attachments", "blocks", "channel", "message", "parameters", "sender", "slackToken", "thread"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) 
-> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SlackTrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of sender + if self.sender: + _dict['sender'] = self.sender.to_dict() + # override the default output from pydantic by calling `to_dict()` of slack_token + if self.slack_token: + _dict['slackToken'] = self.slack_token.to_dict() + # override the default output from pydantic by calling `to_dict()` of thread + if self.thread: + _dict['thread'] = self.thread.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SlackTrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "attachments": obj.get("attachments"), + "blocks": obj.get("blocks"), + "channel": obj.get("channel"), + "message": obj.get("message"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "sender": 
IoArgoprojEventsV1alpha1SlackSender.from_dict(obj["sender"]) if obj.get("sender") is not None else None, + "slackToken": SecretKeySelector.from_dict(obj["slackToken"]) if obj.get("slackToken") is not None else None, + "thread": IoArgoprojEventsV1alpha1SlackThread.from_dict(obj["thread"]) if obj.get("thread") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sns_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sns_event_source.py new file mode 100644 index 000000000000..a74c45804353 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sns_event_source.py @@ -0,0 +1,120 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SNSEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1SNSEventSource + """ # noqa: E501 + access_key: Optional[SecretKeySelector] = Field(default=None, alias="accessKey") + endpoint: Optional[StrictStr] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + metadata: Optional[Dict[str, StrictStr]] = None + region: Optional[StrictStr] = None + role_arn: Optional[StrictStr] = Field(default=None, alias="roleARN") + secret_key: Optional[SecretKeySelector] = Field(default=None, alias="secretKey") + topic_arn: Optional[StrictStr] = Field(default=None, alias="topicArn") + validate_signature: Optional[StrictBool] = Field(default=None, alias="validateSignature") + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + __properties: ClassVar[List[str]] = ["accessKey", "endpoint", "filter", "metadata", "region", "roleARN", "secretKey", "topicArn", "validateSignature", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use 
.model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SNSEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_key + if self.access_key: + _dict['accessKey'] = self.access_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_key + if self.secret_key: + _dict['secretKey'] = self.secret_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SNSEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessKey": SecretKeySelector.from_dict(obj["accessKey"]) if obj.get("accessKey") is not None else None, + "endpoint": obj.get("endpoint"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "metadata": 
obj.get("metadata"), + "region": obj.get("region"), + "roleARN": obj.get("roleARN"), + "secretKey": SecretKeySelector.from_dict(obj["secretKey"]) if obj.get("secretKey") is not None else None, + "topicArn": obj.get("topicArn"), + "validateSignature": obj.get("validateSignature"), + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sqs_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sqs_event_source.py new file mode 100644 index 000000000000..29102ff05c87 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_sqs_event_source.py @@ -0,0 +1,125 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1SQSEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1SQSEventSource + """ # noqa: E501 + access_key: Optional[SecretKeySelector] = Field(default=None, alias="accessKey") + dlq: Optional[StrictBool] = None + endpoint: Optional[StrictStr] = None + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + json_body: Optional[StrictBool] = Field(default=None, alias="jsonBody") + metadata: Optional[Dict[str, StrictStr]] = None + queue: Optional[StrictStr] = None + queue_account_id: Optional[StrictStr] = Field(default=None, alias="queueAccountId") + region: Optional[StrictStr] = None + role_arn: Optional[StrictStr] = Field(default=None, alias="roleARN") + secret_key: Optional[SecretKeySelector] = Field(default=None, alias="secretKey") + session_token: Optional[SecretKeySelector] = Field(default=None, alias="sessionToken") + wait_time_seconds: Optional[StrictStr] = Field(default=None, description="WaitTimeSeconds is The duration (in seconds) for which the call waits for a message to arrive in the queue before returning.", alias="waitTimeSeconds") + __properties: ClassVar[List[str]] = ["accessKey", "dlq", "endpoint", "filter", "jsonBody", "metadata", "queue", "queueAccountId", "region", "roleARN", "secretKey", "sessionToken", "waitTimeSeconds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string 
representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SQSEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_key + if self.access_key: + _dict['accessKey'] = self.access_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_key + if self.secret_key: + _dict['secretKey'] = self.secret_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of session_token + if self.session_token: + _dict['sessionToken'] = self.session_token.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1SQSEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessKey": 
SecretKeySelector.from_dict(obj["accessKey"]) if obj.get("accessKey") is not None else None, + "dlq": obj.get("dlq"), + "endpoint": obj.get("endpoint"), + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "jsonBody": obj.get("jsonBody"), + "metadata": obj.get("metadata"), + "queue": obj.get("queue"), + "queueAccountId": obj.get("queueAccountId"), + "region": obj.get("region"), + "roleARN": obj.get("roleARN"), + "secretKey": SecretKeySelector.from_dict(obj["secretKey"]) if obj.get("secretKey") is not None else None, + "sessionToken": SecretKeySelector.from_dict(obj["sessionToken"]) if obj.get("sessionToken") is not None else None, + "waitTimeSeconds": obj.get("waitTimeSeconds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py new file mode 100644 index 000000000000..7a68a69d2e9f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1StandardK8STrigger(BaseModel): + """ + IoArgoprojEventsV1alpha1StandardK8STrigger + """ # noqa: E501 + live_object: Optional[StrictBool] = Field(default=None, alias="liveObject") + operation: Optional[StrictStr] = None + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = Field(default=None, description="Parameters is the list of parameters that is applied to resolved K8s trigger object.") + patch_strategy: Optional[StrictStr] = Field(default=None, alias="patchStrategy") + source: Optional[IoArgoprojEventsV1alpha1ArtifactLocation] = None + __properties: ClassVar[List[str]] = ["liveObject", "operation", "parameters", "patchStrategy", "source"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StandardK8STrigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + 
"""Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of source + if self.source: + _dict['source'] = self.source.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StandardK8STrigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "liveObject": obj.get("liveObject"), + "operation": obj.get("operation"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "patchStrategy": obj.get("patchStrategy"), + "source": IoArgoprojEventsV1alpha1ArtifactLocation.from_dict(obj["source"]) if obj.get("source") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status.py new file mode 100644 index 000000000000..fe4d379ca95b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is 
an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Status(BaseModel): + """ + Status is a common structure which can be used for Status field. + """ # noqa: E501 + conditions: Optional[List[IoArgoprojEventsV1alpha1Condition]] = None + __properties: ClassVar[List[str]] = ["conditions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Status from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in conditions (list) + _items = [] + if self.conditions: + for _item in self.conditions: + if _item: + _items.append(_item.to_dict()) + _dict['conditions'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Status from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "conditions": [IoArgoprojEventsV1alpha1Condition.from_dict(_item) for _item in obj["conditions"]] if obj.get("conditions") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status_policy.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status_policy.py new file mode 100644 index 000000000000..bfbd8c6d281b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_status_policy.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1StatusPolicy(BaseModel): + """ + IoArgoprojEventsV1alpha1StatusPolicy + """ # noqa: E501 + allow: Optional[List[StrictInt]] = None + __properties: ClassVar[List[str]] = ["allow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StatusPolicy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StatusPolicy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "allow": obj.get("allow") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_event_source.py new file mode 100644 index 000000000000..302e7e853cd8 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_event_source.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_filter import IoArgoprojEventsV1alpha1StorageGridFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1StorageGridEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1StorageGridEventSource + """ # noqa: E501 + api_url: Optional[StrictStr] = Field(default=None, description="APIURL is the url of the storagegrid api.", alias="apiURL") + auth_token: Optional[SecretKeySelector] = Field(default=None, alias="authToken") + bucket: Optional[StrictStr] = Field(default=None, description="Name of the bucket to register notifications for.") + events: Optional[List[StrictStr]] = None + filter: Optional[IoArgoprojEventsV1alpha1StorageGridFilter] = None + metadata: Optional[Dict[str, StrictStr]] = None + region: Optional[StrictStr] = None + topic_arn: Optional[StrictStr] = Field(default=None, alias="topicArn") + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + __properties: ClassVar[List[str]] = ["apiURL", "authToken", "bucket", "events", "filter", "metadata", "region", "topicArn", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, 
exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StorageGridEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth_token + if self.auth_token: + _dict['authToken'] = self.auth_token.to_dict() + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StorageGridEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiURL": obj.get("apiURL"), + "authToken": SecretKeySelector.from_dict(obj["authToken"]) if obj.get("authToken") is not None else None, + "bucket": obj.get("bucket"), + "events": obj.get("events"), + "filter": IoArgoprojEventsV1alpha1StorageGridFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "metadata": obj.get("metadata"), + "region": obj.get("region"), + "topicArn": obj.get("topicArn"), + "webhook": 
IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_filter.py new file mode 100644 index 000000000000..287aefd9e5ac --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_storage_grid_filter.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1StorageGridFilter(BaseModel): + """ + IoArgoprojEventsV1alpha1StorageGridFilter + """ # noqa: E501 + prefix: Optional[StrictStr] = None + suffix: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["prefix", "suffix"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, 
json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StorageGridFilter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StorageGridFilter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "prefix": obj.get("prefix"), + "suffix": obj.get("suffix") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_stripe_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_stripe_event_source.py new file mode 100644 index 000000000000..a143bc606bc8 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_stripe_event_source.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1StripeEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1StripeEventSource + """ # noqa: E501 + api_key: Optional[SecretKeySelector] = Field(default=None, alias="apiKey") + create_webhook: Optional[StrictBool] = Field(default=None, alias="createWebhook") + event_filter: Optional[List[StrictStr]] = Field(default=None, alias="eventFilter") + metadata: Optional[Dict[str, StrictStr]] = None + webhook: Optional[IoArgoprojEventsV1alpha1WebhookContext] = None + __properties: ClassVar[List[str]] = ["apiKey", "createWebhook", "eventFilter", "metadata", "webhook"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StripeEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of api_key + if self.api_key: + _dict['apiKey'] = self.api_key.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook + if self.webhook: + _dict['webhook'] = self.webhook.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1StripeEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiKey": SecretKeySelector.from_dict(obj["apiKey"]) if obj.get("apiKey") is not None else None, + "createWebhook": obj.get("createWebhook"), + "eventFilter": obj.get("eventFilter"), + "metadata": obj.get("metadata"), + "webhook": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhook"]) if obj.get("webhook") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_template.py new file mode 100644 index 000000000000..ec8d77e9d47e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_template.py @@ -0,0 +1,147 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.affinity import Affinity +from argo_workflows.models.container import Container +from argo_workflows.models.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata +from argo_workflows.models.local_object_reference import LocalObjectReference +from argo_workflows.models.pod_security_context import PodSecurityContext +from argo_workflows.models.toleration import Toleration +from argo_workflows.models.volume import Volume +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Template(BaseModel): + """ + IoArgoprojEventsV1alpha1Template + """ # noqa: E501 + affinity: Optional[Affinity] = None + container: Optional[Container] = None + image_pull_secrets: Optional[List[LocalObjectReference]] = Field(default=None, alias="imagePullSecrets") + metadata: Optional[IoArgoprojEventsV1alpha1Metadata] = None + node_selector: Optional[Dict[str, StrictStr]] = Field(default=None, alias="nodeSelector") + priority: Optional[StrictInt] = None + priority_class_name: Optional[StrictStr] = Field(default=None, alias="priorityClassName") + security_context: Optional[PodSecurityContext] = Field(default=None, alias="securityContext") + service_account_name: Optional[StrictStr] = Field(default=None, alias="serviceAccountName") + tolerations: Optional[List[Toleration]] = None + volumes: Optional[List[Volume]] = None + __properties: ClassVar[List[str]] = ["affinity", "container", "imagePullSecrets", "metadata", "nodeSelector", 
"priority", "priorityClassName", "securityContext", "serviceAccountName", "tolerations", "volumes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Template from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of affinity + if self.affinity: + _dict['affinity'] = self.affinity.to_dict() + # override the default output from pydantic by calling `to_dict()` of container + if self.container: + _dict['container'] = self.container.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in image_pull_secrets (list) + _items = [] + if self.image_pull_secrets: + for _item in self.image_pull_secrets: + if _item: + _items.append(_item.to_dict()) + _dict['imagePullSecrets'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of security_context + if self.security_context: + _dict['securityContext'] = self.security_context.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in tolerations (list) + _items = [] + if self.tolerations: + for _item in self.tolerations: + if _item: + _items.append(_item.to_dict()) + _dict['tolerations'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in volumes (list) + _items = [] + if self.volumes: + for _item in self.volumes: + if _item: + _items.append(_item.to_dict()) + _dict['volumes'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Template from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "affinity": Affinity.from_dict(obj["affinity"]) if obj.get("affinity") is not None else None, + "container": Container.from_dict(obj["container"]) if 
obj.get("container") is not None else None, + "imagePullSecrets": [LocalObjectReference.from_dict(_item) for _item in obj["imagePullSecrets"]] if obj.get("imagePullSecrets") is not None else None, + "metadata": IoArgoprojEventsV1alpha1Metadata.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "nodeSelector": obj.get("nodeSelector"), + "priority": obj.get("priority"), + "priorityClassName": obj.get("priorityClassName"), + "securityContext": PodSecurityContext.from_dict(obj["securityContext"]) if obj.get("securityContext") is not None else None, + "serviceAccountName": obj.get("serviceAccountName"), + "tolerations": [Toleration.from_dict(_item) for _item in obj["tolerations"]] if obj.get("tolerations") is not None else None, + "volumes": [Volume.from_dict(_item) for _item in obj["volumes"]] if obj.get("volumes") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_time_filter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_time_filter.py new file mode 100644 index 000000000000..115c2a9fa2f3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_time_filter.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1TimeFilter(BaseModel): + """ + TimeFilter describes a window in time. It filters out events that occur outside the time limits. In other words, only events that occur after Start and before Stop will pass this filter. + """ # noqa: E501 + start: Optional[StrictStr] = Field(default=None, description="Start is the beginning of a time window in UTC. Before this time, events for this dependency are ignored. Format is hh:mm:ss.") + stop: Optional[StrictStr] = Field(default=None, description="Stop is the end of a time window in UTC. After or equal to this time, events for this dependency are ignored and Format is hh:mm:ss. If it is smaller than Start, it is treated as next day of Start (e.g.: 22:00:00-01:00:00 means 22:00:00-25:00:00).") + __properties: ClassVar[List[str]] = ["start", "stop"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TimeFilter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TimeFilter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "start": obj.get("start"), + "stop": obj.get("stop") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_tls_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_tls_config.py new file mode 100644 index 000000000000..7832b60d58bc --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_tls_config.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1TLSConfig(BaseModel): + """ + TLSConfig refers to TLS configuration for a client. + """ # noqa: E501 + ca_cert_secret: Optional[SecretKeySelector] = Field(default=None, alias="caCertSecret") + client_cert_secret: Optional[SecretKeySelector] = Field(default=None, alias="clientCertSecret") + client_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="clientKeySecret") + insecure_skip_verify: Optional[StrictBool] = Field(default=None, alias="insecureSkipVerify") + __properties: ClassVar[List[str]] = ["caCertSecret", "clientCertSecret", "clientKeySecret", "insecureSkipVerify"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TLSConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of ca_cert_secret + if self.ca_cert_secret: + _dict['caCertSecret'] = self.ca_cert_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of client_cert_secret + if self.client_cert_secret: + _dict['clientCertSecret'] = self.client_cert_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of client_key_secret + if self.client_key_secret: + _dict['clientKeySecret'] = self.client_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TLSConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "caCertSecret": SecretKeySelector.from_dict(obj["caCertSecret"]) if obj.get("caCertSecret") is not None else None, + "clientCertSecret": SecretKeySelector.from_dict(obj["clientCertSecret"]) if obj.get("clientCertSecret") is not None else None, + "clientKeySecret": SecretKeySelector.from_dict(obj["clientKeySecret"]) if obj.get("clientKeySecret") is not None else None, + "insecureSkipVerify": obj.get("insecureSkipVerify") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger.py new file mode 100644 index 000000000000..74bc046112ed --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff +from argo_workflows.models.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1Trigger(BaseModel): + """ + IoArgoprojEventsV1alpha1Trigger + """ # noqa: E501 + at_least_once: Optional[StrictBool] = Field(default=None, alias="atLeastOnce") + parameters: Optional[List[IoArgoprojEventsV1alpha1TriggerParameter]] = None + policy: Optional[IoArgoprojEventsV1alpha1TriggerPolicy] = None + rate_limit: Optional[IoArgoprojEventsV1alpha1RateLimit] = Field(default=None, alias="rateLimit") + retry_strategy: Optional[IoArgoprojEventsV1alpha1Backoff] = Field(default=None, alias="retryStrategy") + template: Optional[IoArgoprojEventsV1alpha1TriggerTemplate] = None + __properties: 
ClassVar[List[str]] = ["atLeastOnce", "parameters", "policy", "rateLimit", "retryStrategy", "template"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Trigger from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + # override the default output from pydantic by calling `to_dict()` of policy + if self.policy: + _dict['policy'] = self.policy.to_dict() + # override the default output from pydantic by calling `to_dict()` of rate_limit + if self.rate_limit: + _dict['rateLimit'] = self.rate_limit.to_dict() + # override the default output from pydantic by calling `to_dict()` of retry_strategy + if self.retry_strategy: + _dict['retryStrategy'] = self.retry_strategy.to_dict() + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1Trigger from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "atLeastOnce": obj.get("atLeastOnce"), + "parameters": [IoArgoprojEventsV1alpha1TriggerParameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "policy": IoArgoprojEventsV1alpha1TriggerPolicy.from_dict(obj["policy"]) if obj.get("policy") is not None else None, + "rateLimit": IoArgoprojEventsV1alpha1RateLimit.from_dict(obj["rateLimit"]) if obj.get("rateLimit") is not None else None, + "retryStrategy": IoArgoprojEventsV1alpha1Backoff.from_dict(obj["retryStrategy"]) if obj.get("retryStrategy") is not None else None, + "template": IoArgoprojEventsV1alpha1TriggerTemplate.from_dict(obj["template"]) if obj.get("template") is not None 
else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter.py new file mode 100644 index 000000000000..62f661eefe9e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1TriggerParameter(BaseModel): + """ + IoArgoprojEventsV1alpha1TriggerParameter + """ # noqa: E501 + dest: Optional[StrictStr] = Field(default=None, description="Dest is the JSONPath of a resource key. A path is a series of keys separated by a dot. The colon character can be escaped with '.' The -1 key can be used to append a value to an existing array. 
See https://github.com/tidwall/sjson#path-syntax for more information about how this is used.") + operation: Optional[StrictStr] = Field(default=None, description="Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it.") + src: Optional[IoArgoprojEventsV1alpha1TriggerParameterSource] = None + __properties: ClassVar[List[str]] = ["dest", "operation", "src"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerParameter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of src + if self.src: + _dict['src'] = self.src.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerParameter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "dest": obj.get("dest"), + "operation": obj.get("operation"), + "src": IoArgoprojEventsV1alpha1TriggerParameterSource.from_dict(obj["src"]) if obj.get("src") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter_source.py new file mode 100644 index 000000000000..ce9a6b202161 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_parameter_source.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1TriggerParameterSource(BaseModel): + """ + IoArgoprojEventsV1alpha1TriggerParameterSource + """ # noqa: E501 + context_key: Optional[StrictStr] = Field(default=None, description="ContextKey is the JSONPath of the event's (JSON decoded) context key ContextKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.", alias="contextKey") + context_template: Optional[StrictStr] = Field(default=None, alias="contextTemplate") + data_key: Optional[StrictStr] = Field(default=None, description="DataKey is the JSONPath of the event's (JSON decoded) data key DataKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this.", alias="dataKey") + data_template: Optional[StrictStr] = Field(default=None, alias="dataTemplate") + dependency_name: Optional[StrictStr] = Field(default=None, description="DependencyName refers to the name of the dependency. The event which is stored for this dependency is used as payload for the parameterization. 
Make sure to refer to one of the dependencies you have defined under Dependencies list.", alias="dependencyName") + use_raw_data: Optional[StrictBool] = Field(default=None, alias="useRawData") + value: Optional[StrictStr] = Field(default=None, description="Value is the default literal value to use for this parameter source This is only used if the DataKey is invalid. If the DataKey is invalid and this is not defined, this param source will produce an error.") + __properties: ClassVar[List[str]] = ["contextKey", "contextTemplate", "dataKey", "dataTemplate", "dependencyName", "useRawData", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerParameterSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerParameterSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "contextKey": obj.get("contextKey"), + "contextTemplate": obj.get("contextTemplate"), + "dataKey": obj.get("dataKey"), + "dataTemplate": obj.get("dataTemplate"), + "dependencyName": obj.get("dependencyName"), + "useRawData": obj.get("useRawData"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_policy.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_policy.py new file mode 100644 index 000000000000..e6855b2e9854 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_policy.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy +from argo_workflows.models.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1TriggerPolicy(BaseModel): + """ + IoArgoprojEventsV1alpha1TriggerPolicy + """ # noqa: E501 + k8s: Optional[IoArgoprojEventsV1alpha1K8SResourcePolicy] = None + status: Optional[IoArgoprojEventsV1alpha1StatusPolicy] = None + __properties: ClassVar[List[str]] = ["k8s", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerPolicy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of k8s + if self.k8s: + _dict['k8s'] = self.k8s.to_dict() + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerPolicy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "k8s": IoArgoprojEventsV1alpha1K8SResourcePolicy.from_dict(obj["k8s"]) if obj.get("k8s") is not None else None, + "status": IoArgoprojEventsV1alpha1StatusPolicy.from_dict(obj["status"]) if obj.get("status") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_template.py new file mode 100644 index 000000000000..255ee6474a53 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_trigger_template.py @@ -0,0 +1,183 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria +from argo_workflows.models.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_http_trigger import IoArgoprojEventsV1alpha1HTTPTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger +from argo_workflows.models.io_argoproj_events_v1alpha1_standard_k8_s_trigger 
import IoArgoprojEventsV1alpha1StandardK8STrigger +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1TriggerTemplate(BaseModel): + """ + TriggerTemplate is the template that describes trigger specification. + """ # noqa: E501 + argo_workflow: Optional[IoArgoprojEventsV1alpha1ArgoWorkflowTrigger] = Field(default=None, alias="argoWorkflow") + aws_lambda: Optional[IoArgoprojEventsV1alpha1AWSLambdaTrigger] = Field(default=None, alias="awsLambda") + azure_event_hubs: Optional[IoArgoprojEventsV1alpha1AzureEventHubsTrigger] = Field(default=None, alias="azureEventHubs") + azure_service_bus: Optional[IoArgoprojEventsV1alpha1AzureServiceBusTrigger] = Field(default=None, alias="azureServiceBus") + conditions: Optional[StrictStr] = None + conditions_reset: Optional[List[IoArgoprojEventsV1alpha1ConditionsResetCriteria]] = Field(default=None, alias="conditionsReset") + custom: Optional[IoArgoprojEventsV1alpha1CustomTrigger] = None + email: Optional[IoArgoprojEventsV1alpha1EmailTrigger] = None + http: Optional[IoArgoprojEventsV1alpha1HTTPTrigger] = None + k8s: Optional[IoArgoprojEventsV1alpha1StandardK8STrigger] = None + kafka: Optional[IoArgoprojEventsV1alpha1KafkaTrigger] = None + log: Optional[IoArgoprojEventsV1alpha1LogTrigger] = None + name: Optional[StrictStr] = Field(default=None, description="Name is a unique name of the action to take.") + nats: Optional[IoArgoprojEventsV1alpha1NATSTrigger] = None + open_whisk: Optional[IoArgoprojEventsV1alpha1OpenWhiskTrigger] = Field(default=None, alias="openWhisk") + pulsar: Optional[IoArgoprojEventsV1alpha1PulsarTrigger] = None + slack: Optional[IoArgoprojEventsV1alpha1SlackTrigger] = None + __properties: ClassVar[List[str]] = ["argoWorkflow", "awsLambda", "azureEventHubs", "azureServiceBus", "conditions", "conditionsReset", "custom", "email", "http", "k8s", "kafka", "log", "name", "nats", "openWhisk", "pulsar", "slack"] + + model_config = ConfigDict( + populate_by_name=True, + 
validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerTemplate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of argo_workflow + if self.argo_workflow: + _dict['argoWorkflow'] = self.argo_workflow.to_dict() + # override the default output from pydantic by calling `to_dict()` of aws_lambda + if self.aws_lambda: + _dict['awsLambda'] = self.aws_lambda.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure_event_hubs + if self.azure_event_hubs: + _dict['azureEventHubs'] = self.azure_event_hubs.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure_service_bus + if self.azure_service_bus: + _dict['azureServiceBus'] = self.azure_service_bus.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in conditions_reset (list) + _items = [] + if self.conditions_reset: + for _item in 
self.conditions_reset: + if _item: + _items.append(_item.to_dict()) + _dict['conditionsReset'] = _items + # override the default output from pydantic by calling `to_dict()` of custom + if self.custom: + _dict['custom'] = self.custom.to_dict() + # override the default output from pydantic by calling `to_dict()` of email + if self.email: + _dict['email'] = self.email.to_dict() + # override the default output from pydantic by calling `to_dict()` of http + if self.http: + _dict['http'] = self.http.to_dict() + # override the default output from pydantic by calling `to_dict()` of k8s + if self.k8s: + _dict['k8s'] = self.k8s.to_dict() + # override the default output from pydantic by calling `to_dict()` of kafka + if self.kafka: + _dict['kafka'] = self.kafka.to_dict() + # override the default output from pydantic by calling `to_dict()` of log + if self.log: + _dict['log'] = self.log.to_dict() + # override the default output from pydantic by calling `to_dict()` of nats + if self.nats: + _dict['nats'] = self.nats.to_dict() + # override the default output from pydantic by calling `to_dict()` of open_whisk + if self.open_whisk: + _dict['openWhisk'] = self.open_whisk.to_dict() + # override the default output from pydantic by calling `to_dict()` of pulsar + if self.pulsar: + _dict['pulsar'] = self.pulsar.to_dict() + # override the default output from pydantic by calling `to_dict()` of slack + if self.slack: + _dict['slack'] = self.slack.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1TriggerTemplate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "argoWorkflow": IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.from_dict(obj["argoWorkflow"]) if obj.get("argoWorkflow") is not None else None, + "awsLambda": IoArgoprojEventsV1alpha1AWSLambdaTrigger.from_dict(obj["awsLambda"]) 
if obj.get("awsLambda") is not None else None, + "azureEventHubs": IoArgoprojEventsV1alpha1AzureEventHubsTrigger.from_dict(obj["azureEventHubs"]) if obj.get("azureEventHubs") is not None else None, + "azureServiceBus": IoArgoprojEventsV1alpha1AzureServiceBusTrigger.from_dict(obj["azureServiceBus"]) if obj.get("azureServiceBus") is not None else None, + "conditions": obj.get("conditions"), + "conditionsReset": [IoArgoprojEventsV1alpha1ConditionsResetCriteria.from_dict(_item) for _item in obj["conditionsReset"]] if obj.get("conditionsReset") is not None else None, + "custom": IoArgoprojEventsV1alpha1CustomTrigger.from_dict(obj["custom"]) if obj.get("custom") is not None else None, + "email": IoArgoprojEventsV1alpha1EmailTrigger.from_dict(obj["email"]) if obj.get("email") is not None else None, + "http": IoArgoprojEventsV1alpha1HTTPTrigger.from_dict(obj["http"]) if obj.get("http") is not None else None, + "k8s": IoArgoprojEventsV1alpha1StandardK8STrigger.from_dict(obj["k8s"]) if obj.get("k8s") is not None else None, + "kafka": IoArgoprojEventsV1alpha1KafkaTrigger.from_dict(obj["kafka"]) if obj.get("kafka") is not None else None, + "log": IoArgoprojEventsV1alpha1LogTrigger.from_dict(obj["log"]) if obj.get("log") is not None else None, + "name": obj.get("name"), + "nats": IoArgoprojEventsV1alpha1NATSTrigger.from_dict(obj["nats"]) if obj.get("nats") is not None else None, + "openWhisk": IoArgoprojEventsV1alpha1OpenWhiskTrigger.from_dict(obj["openWhisk"]) if obj.get("openWhisk") is not None else None, + "pulsar": IoArgoprojEventsV1alpha1PulsarTrigger.from_dict(obj["pulsar"]) if obj.get("pulsar") is not None else None, + "slack": IoArgoprojEventsV1alpha1SlackTrigger.from_dict(obj["slack"]) if obj.get("slack") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_url_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_url_artifact.py new file mode 100644 index 
000000000000..13e7e11ed1a7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_url_artifact.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1URLArtifact(BaseModel): + """ + URLArtifact contains information about an artifact at an http endpoint. + """ # noqa: E501 + path: Optional[StrictStr] = None + verify_cert: Optional[StrictBool] = Field(default=None, alias="verifyCert") + __properties: ClassVar[List[str]] = ["path", "verifyCert"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1URLArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1URLArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "path": obj.get("path"), + "verifyCert": obj.get("verifyCert") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_value_from_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_value_from_source.py new file mode 100644 index 000000000000..a8c455cafc6d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_value_from_source.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1ValueFromSource(BaseModel): + """ + IoArgoprojEventsV1alpha1ValueFromSource + """ # noqa: E501 + config_map_key_ref: Optional[ConfigMapKeySelector] = Field(default=None, alias="configMapKeyRef") + secret_key_ref: Optional[SecretKeySelector] = Field(default=None, alias="secretKeyRef") + __properties: ClassVar[List[str]] = ["configMapKeyRef", "secretKeyRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ValueFromSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config_map_key_ref + if self.config_map_key_ref: + _dict['configMapKeyRef'] = self.config_map_key_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_key_ref + if self.secret_key_ref: + _dict['secretKeyRef'] = self.secret_key_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1ValueFromSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMapKeyRef": ConfigMapKeySelector.from_dict(obj["configMapKeyRef"]) if obj.get("configMapKeyRef") is not None else None, + "secretKeyRef": SecretKeySelector.from_dict(obj["secretKeyRef"]) if obj.get("secretKeyRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_watch_path_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_watch_path_config.py new file mode 100644 index 000000000000..8ca710fa1694 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_watch_path_config.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1WatchPathConfig(BaseModel): + """ + IoArgoprojEventsV1alpha1WatchPathConfig + """ # noqa: E501 + directory: Optional[StrictStr] = None + path: Optional[StrictStr] = None + path_regexp: Optional[StrictStr] = Field(default=None, alias="pathRegexp") + __properties: ClassVar[List[str]] = ["directory", "path", "pathRegexp"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1WatchPathConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1WatchPathConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "directory": obj.get("directory"), + "path": obj.get("path"), + "pathRegexp": obj.get("pathRegexp") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_context.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_context.py new file mode 100644 index 000000000000..a3a9b583f8a5 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_context.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1WebhookContext(BaseModel): + """ + IoArgoprojEventsV1alpha1WebhookContext + """ # noqa: E501 + auth_secret: Optional[SecretKeySelector] = Field(default=None, alias="authSecret") + endpoint: Optional[StrictStr] = None + max_payload_size: Optional[StrictStr] = Field(default=None, alias="maxPayloadSize") + metadata: Optional[Dict[str, StrictStr]] = None + method: Optional[StrictStr] = None + port: Optional[StrictStr] = Field(default=None, description="Port on which HTTP server is listening for incoming events.") + server_cert_secret: Optional[SecretKeySelector] = Field(default=None, alias="serverCertSecret") + server_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="serverKeySecret") + url: Optional[StrictStr] = Field(default=None, description="URL is the url of the server.") + __properties: ClassVar[List[str]] = ["authSecret", "endpoint", "maxPayloadSize", "metadata", "method", "port", "serverCertSecret", "serverKeySecret", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1WebhookContext from a 
JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth_secret + if self.auth_secret: + _dict['authSecret'] = self.auth_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of server_cert_secret + if self.server_cert_secret: + _dict['serverCertSecret'] = self.server_cert_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of server_key_secret + if self.server_key_secret: + _dict['serverKeySecret'] = self.server_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1WebhookContext from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "authSecret": SecretKeySelector.from_dict(obj["authSecret"]) if obj.get("authSecret") is not None else None, + "endpoint": obj.get("endpoint"), + "maxPayloadSize": obj.get("maxPayloadSize"), + "metadata": obj.get("metadata"), + "method": obj.get("method"), + "port": obj.get("port"), + "serverCertSecret": SecretKeySelector.from_dict(obj["serverCertSecret"]) if obj.get("serverCertSecret") is not None else None, + "serverKeySecret": SecretKeySelector.from_dict(obj["serverKeySecret"]) if obj.get("serverKeySecret") is not None else None, + "url": obj.get("url") + }) + return _obj + + diff 
--git a/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_event_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_event_source.py new file mode 100644 index 000000000000..8318f732a5fa --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_events_v1alpha1_webhook_event_source.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojEventsV1alpha1WebhookEventSource(BaseModel): + """ + IoArgoprojEventsV1alpha1WebhookEventSource + """ # noqa: E501 + filter: Optional[IoArgoprojEventsV1alpha1EventSourceFilter] = None + webhook_context: Optional[IoArgoprojEventsV1alpha1WebhookContext] = Field(default=None, alias="webhookContext") + __properties: ClassVar[List[str]] = ["filter", "webhookContext"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + 
"""Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1WebhookEventSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of filter + if self.filter: + _dict['filter'] = self.filter.to_dict() + # override the default output from pydantic by calling `to_dict()` of webhook_context + if self.webhook_context: + _dict['webhookContext'] = self.webhook_context.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojEventsV1alpha1WebhookEventSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "filter": IoArgoprojEventsV1alpha1EventSourceFilter.from_dict(obj["filter"]) if obj.get("filter") is not None else None, + "webhookContext": IoArgoprojEventsV1alpha1WebhookContext.from_dict(obj["webhookContext"]) if obj.get("webhookContext") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_archive_strategy.py 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_archive_strategy.py new file mode 100644 index 000000000000..c32ec7aed8c2 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_archive_strategy.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_tar_strategy import IoArgoprojWorkflowV1alpha1TarStrategy +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArchiveStrategy(BaseModel): + """ + ArchiveStrategy describes how to archive files/directory when saving artifacts + """ # noqa: E501 + var_none: Optional[Dict[str, Any]] = Field(default=None, description="NoneStrategy indicates to skip tar process and upload the files or directory tree as independent files. 
Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately.", alias="none") + tar: Optional[IoArgoprojWorkflowV1alpha1TarStrategy] = None + zip: Optional[Dict[str, Any]] = Field(default=None, description="ZipStrategy will unzip zipped input artifacts") + __properties: ClassVar[List[str]] = ["none", "tar", "zip"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArchiveStrategy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of tar + if self.tar: + _dict['tar'] = self.tar.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArchiveStrategy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "none": obj.get("none"), + "tar": IoArgoprojWorkflowV1alpha1TarStrategy.from_dict(obj["tar"]) if obj.get("tar") is not None else None, + "zip": obj.get("zip") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_arguments.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_arguments.py new file mode 100644 index 000000000000..ce4f3df7d460 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_arguments.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Arguments(BaseModel): + """ + Arguments to a template + """ # noqa: E501 + artifacts: Optional[List[IoArgoprojWorkflowV1alpha1Artifact]] = Field(default=None, description="Artifacts is the list of artifacts to pass to the template or workflow") + parameters: Optional[List[IoArgoprojWorkflowV1alpha1Parameter]] = Field(default=None, description="Parameters is the list of parameters to pass to the template or workflow") + __properties: ClassVar[List[str]] = ["artifacts", "parameters"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Arguments from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in artifacts (list) + _items = [] + if self.artifacts: + for _item in self.artifacts: + if _item: + _items.append(_item.to_dict()) + _dict['artifacts'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Arguments from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifacts": [IoArgoprojWorkflowV1alpha1Artifact.from_dict(_item) for _item in obj["artifacts"]] if obj.get("artifacts") is not None else None, + "parameters": [IoArgoprojWorkflowV1alpha1Parameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_art_gc_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_art_gc_status.py new file mode 100644 index 000000000000..12059f68038c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_art_gc_status.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow 
engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtGCStatus(BaseModel): + """ + ArtGCStatus maintains state related to ArtifactGC + """ # noqa: E501 + not_specified: Optional[StrictBool] = Field(default=None, description="if this is true, we already checked to see if we need to do it and we don't", alias="notSpecified") + pods_recouped: Optional[Dict[str, StrictBool]] = Field(default=None, description="have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once", alias="podsRecouped") + strategies_processed: Optional[Dict[str, StrictBool]] = Field(default=None, description="have Pods been started to perform this strategy? 
(enables us not to re-process what we've already done)", alias="strategiesProcessed") + __properties: ClassVar[List[str]] = ["notSpecified", "podsRecouped", "strategiesProcessed"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtGCStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtGCStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "notSpecified": obj.get("notSpecified"), + "podsRecouped": obj.get("podsRecouped"), + "strategiesProcessed": obj.get("strategiesProcessed") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact.py new file mode 100644 index 000000000000..4f76358269ba --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact.py @@ -0,0 +1,173 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Artifact(BaseModel): + """ + Artifact indicates an artifact to place at a specified path + """ # noqa: E501 + archive: Optional[IoArgoprojWorkflowV1alpha1ArchiveStrategy] = None + archive_logs: Optional[StrictBool] = Field(default=None, description="ArchiveLogs indicates if the container logs should be archived", alias="archiveLogs") + 
artifact_gc: Optional[IoArgoprojWorkflowV1alpha1ArtifactGC] = Field(default=None, alias="artifactGC") + artifactory: Optional[IoArgoprojWorkflowV1alpha1ArtifactoryArtifact] = None + azure: Optional[IoArgoprojWorkflowV1alpha1AzureArtifact] = None + deleted: Optional[StrictBool] = Field(default=None, description="Has this been deleted?") + var_from: Optional[StrictStr] = Field(default=None, description="From allows an artifact to reference an artifact from a previous step", alias="from") + from_expression: Optional[StrictStr] = Field(default=None, description="FromExpression, if defined, is evaluated to specify the value for the artifact", alias="fromExpression") + gcs: Optional[IoArgoprojWorkflowV1alpha1GCSArtifact] = None + git: Optional[IoArgoprojWorkflowV1alpha1GitArtifact] = None + global_name: Optional[StrictStr] = Field(default=None, description="GlobalName exports an output artifact to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts", alias="globalName") + hdfs: Optional[IoArgoprojWorkflowV1alpha1HDFSArtifact] = None + http: Optional[IoArgoprojWorkflowV1alpha1HTTPArtifact] = None + mode: Optional[StrictInt] = Field(default=None, description="mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts.") + name: StrictStr = Field(description="name of the artifact. 
must be unique within a template's inputs/outputs.") + optional: Optional[StrictBool] = Field(default=None, description="Make Artifacts optional, if Artifacts doesn't generate or exist") + oss: Optional[IoArgoprojWorkflowV1alpha1OSSArtifact] = None + path: Optional[StrictStr] = Field(default=None, description="Path is the container path to the artifact") + raw: Optional[IoArgoprojWorkflowV1alpha1RawArtifact] = None + recurse_mode: Optional[StrictBool] = Field(default=None, description="If mode is set, apply the permission recursively into the artifact if it is a folder", alias="recurseMode") + s3: Optional[IoArgoprojWorkflowV1alpha1S3Artifact] = None + sub_path: Optional[StrictStr] = Field(default=None, description="SubPath allows an artifact to be sourced from a subpath within the specified source", alias="subPath") + __properties: ClassVar[List[str]] = ["archive", "archiveLogs", "artifactGC", "artifactory", "azure", "deleted", "from", "fromExpression", "gcs", "git", "globalName", "hdfs", "http", "mode", "name", "optional", "oss", "path", "raw", "recurseMode", "s3", "subPath"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Artifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of archive + if self.archive: + _dict['archive'] = self.archive.to_dict() + # override the default output from pydantic by calling `to_dict()` of artifact_gc + if self.artifact_gc: + _dict['artifactGC'] = self.artifact_gc.to_dict() + # override the default output from pydantic by calling `to_dict()` of artifactory + if self.artifactory: + _dict['artifactory'] = self.artifactory.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure + if self.azure: + _dict['azure'] = self.azure.to_dict() + # override the default output from pydantic by calling `to_dict()` of gcs + if self.gcs: + _dict['gcs'] = self.gcs.to_dict() + # override the default output from pydantic by calling `to_dict()` of git + if self.git: + _dict['git'] = self.git.to_dict() + # override the default output from pydantic by calling `to_dict()` of hdfs + if self.hdfs: + _dict['hdfs'] = self.hdfs.to_dict() + # override the default output from pydantic by calling `to_dict()` of http + if self.http: + _dict['http'] = self.http.to_dict() + # override the default output from pydantic by calling `to_dict()` of oss + if self.oss: + _dict['oss'] = self.oss.to_dict() + # override the default output from pydantic by calling `to_dict()` of raw + if self.raw: + _dict['raw'] = self.raw.to_dict() + # override the default output from pydantic by calling `to_dict()` of s3 + if self.s3: + _dict['s3'] = self.s3.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create 
an instance of IoArgoprojWorkflowV1alpha1Artifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "archive": IoArgoprojWorkflowV1alpha1ArchiveStrategy.from_dict(obj["archive"]) if obj.get("archive") is not None else None, + "archiveLogs": obj.get("archiveLogs"), + "artifactGC": IoArgoprojWorkflowV1alpha1ArtifactGC.from_dict(obj["artifactGC"]) if obj.get("artifactGC") is not None else None, + "artifactory": IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.from_dict(obj["artifactory"]) if obj.get("artifactory") is not None else None, + "azure": IoArgoprojWorkflowV1alpha1AzureArtifact.from_dict(obj["azure"]) if obj.get("azure") is not None else None, + "deleted": obj.get("deleted"), + "from": obj.get("from"), + "fromExpression": obj.get("fromExpression"), + "gcs": IoArgoprojWorkflowV1alpha1GCSArtifact.from_dict(obj["gcs"]) if obj.get("gcs") is not None else None, + "git": IoArgoprojWorkflowV1alpha1GitArtifact.from_dict(obj["git"]) if obj.get("git") is not None else None, + "globalName": obj.get("globalName"), + "hdfs": IoArgoprojWorkflowV1alpha1HDFSArtifact.from_dict(obj["hdfs"]) if obj.get("hdfs") is not None else None, + "http": IoArgoprojWorkflowV1alpha1HTTPArtifact.from_dict(obj["http"]) if obj.get("http") is not None else None, + "mode": obj.get("mode"), + "name": obj.get("name"), + "optional": obj.get("optional"), + "oss": IoArgoprojWorkflowV1alpha1OSSArtifact.from_dict(obj["oss"]) if obj.get("oss") is not None else None, + "path": obj.get("path"), + "raw": IoArgoprojWorkflowV1alpha1RawArtifact.from_dict(obj["raw"]) if obj.get("raw") is not None else None, + "recurseMode": obj.get("recurseMode"), + "s3": IoArgoprojWorkflowV1alpha1S3Artifact.from_dict(obj["s3"]) if obj.get("s3") is not None else None, + "subPath": obj.get("subPath") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc.py 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc.py new file mode 100644 index 000000000000..42886b364451 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactGC(BaseModel): + """ + ArtifactGC describes how to delete artifacts from completed Workflows - this is embedded into the WorkflowLevelArtifactGC, and also used for individual Artifacts to override that as needed + """ # noqa: E501 + pod_metadata: Optional[IoArgoprojWorkflowV1alpha1Metadata] = Field(default=None, alias="podMetadata") + service_account_name: Optional[StrictStr] = Field(default=None, description="ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion", alias="serviceAccountName") + strategy: Optional[StrictStr] = Field(default=None, description="Strategy is the strategy to use.") + __properties: ClassVar[List[str]] = ["podMetadata", "serviceAccountName", "strategy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def 
to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactGC from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pod_metadata + if self.pod_metadata: + _dict['podMetadata'] = self.pod_metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactGC from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "podMetadata": IoArgoprojWorkflowV1alpha1Metadata.from_dict(obj["podMetadata"]) if obj.get("podMetadata") is not None else None, + "serviceAccountName": obj.get("serviceAccountName"), + "strategy": obj.get("strategy") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py new file mode 
100644 index 000000000000..ac134b43f95c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_spec.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactGCSpec(BaseModel): + """ + ArtifactGCSpec specifies the Artifacts that need to be deleted + """ # noqa: E501 + artifacts_by_node: Optional[Dict[str, IoArgoprojWorkflowV1alpha1ArtifactNodeSpec]] = Field(default=None, description="ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node", alias="artifactsByNode") + __properties: ClassVar[List[str]] = ["artifactsByNode"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an 
instance of IoArgoprojWorkflowV1alpha1ArtifactGCSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in artifacts_by_node (dict) + _field_dict = {} + if self.artifacts_by_node: + for _key in self.artifacts_by_node: + if self.artifacts_by_node[_key]: + _field_dict[_key] = self.artifacts_by_node[_key].to_dict() + _dict['artifactsByNode'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactGCSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifactsByNode": dict( + (_k, IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.from_dict(_v)) + for _k, _v in obj["artifactsByNode"].items() + ) + if obj.get("artifactsByNode") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_status.py new file mode 100644 index 000000000000..94666fc2c124 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_gc_status.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine 
for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result_node_status import IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactGCStatus(BaseModel): + """ + ArtifactGCStatus describes the result of the deletion + """ # noqa: E501 + artifact_results_by_node: Optional[Dict[str, IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus]] = Field(default=None, description="ArtifactResultsByNode maps Node name to result", alias="artifactResultsByNode") + __properties: ClassVar[List[str]] = ["artifactResultsByNode"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactGCStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in artifact_results_by_node (dict) + _field_dict = {} + if self.artifact_results_by_node: + for _key in self.artifact_results_by_node: + if self.artifact_results_by_node[_key]: + _field_dict[_key] = self.artifact_results_by_node[_key].to_dict() + _dict['artifactResultsByNode'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactGCStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifactResultsByNode": dict( + (_k, IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.from_dict(_v)) + for _k, _v in obj["artifactResultsByNode"].items() + ) + if obj.get("artifactResultsByNode") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_location.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_location.py new file mode 100644 index 000000000000..8d28eb39a025 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_location.py @@ -0,0 +1,141 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactLocation(BaseModel): + """ + ArtifactLocation describes a location for a single or multiple artifacts. It is used as single artifact in the context of inputs/outputs (e.g. outputs.artifacts.artname). 
It is also used to describe the location of multiple artifacts such as the archive location of a single workflow step, which the executor will use as a default location to store its files. + """ # noqa: E501 + archive_logs: Optional[StrictBool] = Field(default=None, description="ArchiveLogs indicates if the container logs should be archived", alias="archiveLogs") + artifactory: Optional[IoArgoprojWorkflowV1alpha1ArtifactoryArtifact] = None + azure: Optional[IoArgoprojWorkflowV1alpha1AzureArtifact] = None + gcs: Optional[IoArgoprojWorkflowV1alpha1GCSArtifact] = None + git: Optional[IoArgoprojWorkflowV1alpha1GitArtifact] = None + hdfs: Optional[IoArgoprojWorkflowV1alpha1HDFSArtifact] = None + http: Optional[IoArgoprojWorkflowV1alpha1HTTPArtifact] = None + oss: Optional[IoArgoprojWorkflowV1alpha1OSSArtifact] = None + raw: Optional[IoArgoprojWorkflowV1alpha1RawArtifact] = None + s3: Optional[IoArgoprojWorkflowV1alpha1S3Artifact] = None + __properties: ClassVar[List[str]] = ["archiveLogs", "artifactory", "azure", "gcs", "git", "hdfs", "http", "oss", "raw", "s3"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactLocation from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of artifactory + if self.artifactory: + _dict['artifactory'] = self.artifactory.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure + if self.azure: + _dict['azure'] = self.azure.to_dict() + # override the default output from pydantic by calling `to_dict()` of gcs + if self.gcs: + _dict['gcs'] = self.gcs.to_dict() + # override the default output from pydantic by calling `to_dict()` of git + if self.git: + _dict['git'] = self.git.to_dict() + # override the default output from pydantic by calling `to_dict()` of hdfs + if self.hdfs: + _dict['hdfs'] = self.hdfs.to_dict() + # override the default output from pydantic by calling `to_dict()` of http + if self.http: + _dict['http'] = self.http.to_dict() + # override the default output from pydantic by calling `to_dict()` of oss + if self.oss: + _dict['oss'] = self.oss.to_dict() + # override the default output from pydantic by calling `to_dict()` of raw + if self.raw: + _dict['raw'] = self.raw.to_dict() + # override the default output from pydantic by calling `to_dict()` of s3 + if self.s3: + _dict['s3'] = self.s3.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactLocation from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "archiveLogs": obj.get("archiveLogs"), + "artifactory": 
IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.from_dict(obj["artifactory"]) if obj.get("artifactory") is not None else None, + "azure": IoArgoprojWorkflowV1alpha1AzureArtifact.from_dict(obj["azure"]) if obj.get("azure") is not None else None, + "gcs": IoArgoprojWorkflowV1alpha1GCSArtifact.from_dict(obj["gcs"]) if obj.get("gcs") is not None else None, + "git": IoArgoprojWorkflowV1alpha1GitArtifact.from_dict(obj["git"]) if obj.get("git") is not None else None, + "hdfs": IoArgoprojWorkflowV1alpha1HDFSArtifact.from_dict(obj["hdfs"]) if obj.get("hdfs") is not None else None, + "http": IoArgoprojWorkflowV1alpha1HTTPArtifact.from_dict(obj["http"]) if obj.get("http") is not None else None, + "oss": IoArgoprojWorkflowV1alpha1OSSArtifact.from_dict(obj["oss"]) if obj.get("oss") is not None else None, + "raw": IoArgoprojWorkflowV1alpha1RawArtifact.from_dict(obj["raw"]) if obj.get("raw") is not None else None, + "s3": IoArgoprojWorkflowV1alpha1S3Artifact.from_dict(obj["s3"]) if obj.get("s3") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_node_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_node_spec.py new file mode 100644 index 000000000000..f78abe102508 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_node_spec.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactNodeSpec(BaseModel): + """ + ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node + """ # noqa: E501 + archive_location: Optional[IoArgoprojWorkflowV1alpha1ArtifactLocation] = Field(default=None, alias="archiveLocation") + artifacts: Optional[Dict[str, IoArgoprojWorkflowV1alpha1Artifact]] = Field(default=None, description="Artifacts maps artifact name to Artifact description") + __properties: ClassVar[List[str]] = ["archiveLocation", "artifacts"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactNodeSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of archive_location + if self.archive_location: + _dict['archiveLocation'] = self.archive_location.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in artifacts (dict) + _field_dict = {} + if self.artifacts: + for _key in self.artifacts: + if self.artifacts[_key]: + _field_dict[_key] = self.artifacts[_key].to_dict() + _dict['artifacts'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactNodeSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "archiveLocation": IoArgoprojWorkflowV1alpha1ArtifactLocation.from_dict(obj["archiveLocation"]) if obj.get("archiveLocation") is not None else None, + "artifacts": dict( + (_k, IoArgoprojWorkflowV1alpha1Artifact.from_dict(_v)) + for _k, _v in obj["artifacts"].items() + ) + if obj.get("artifacts") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_paths.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_paths.py new file mode 100644 index 000000000000..f57d112a2be8 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_paths.py @@ -0,0 +1,173 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source 
container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact +from typing import Optional, Set +from typing_extensions import Self + +class 
IoArgoprojWorkflowV1alpha1ArtifactPaths(BaseModel): + """ + ArtifactPaths expands a step from a collection of artifacts + """ # noqa: E501 + archive: Optional[IoArgoprojWorkflowV1alpha1ArchiveStrategy] = None + archive_logs: Optional[StrictBool] = Field(default=None, description="ArchiveLogs indicates if the container logs should be archived", alias="archiveLogs") + artifact_gc: Optional[IoArgoprojWorkflowV1alpha1ArtifactGC] = Field(default=None, alias="artifactGC") + artifactory: Optional[IoArgoprojWorkflowV1alpha1ArtifactoryArtifact] = None + azure: Optional[IoArgoprojWorkflowV1alpha1AzureArtifact] = None + deleted: Optional[StrictBool] = Field(default=None, description="Has this been deleted?") + var_from: Optional[StrictStr] = Field(default=None, description="From allows an artifact to reference an artifact from a previous step", alias="from") + from_expression: Optional[StrictStr] = Field(default=None, description="FromExpression, if defined, is evaluated to specify the value for the artifact", alias="fromExpression") + gcs: Optional[IoArgoprojWorkflowV1alpha1GCSArtifact] = None + git: Optional[IoArgoprojWorkflowV1alpha1GitArtifact] = None + global_name: Optional[StrictStr] = Field(default=None, description="GlobalName exports an output artifact to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.artifacts.XXXX}} and in workflow.status.outputs.artifacts", alias="globalName") + hdfs: Optional[IoArgoprojWorkflowV1alpha1HDFSArtifact] = None + http: Optional[IoArgoprojWorkflowV1alpha1HTTPArtifact] = None + mode: Optional[StrictInt] = Field(default=None, description="mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts.") + name: StrictStr = Field(description="name of the artifact. 
must be unique within a template's inputs/outputs.") + optional: Optional[StrictBool] = Field(default=None, description="Make Artifacts optional, if Artifacts doesn't generate or exist") + oss: Optional[IoArgoprojWorkflowV1alpha1OSSArtifact] = None + path: Optional[StrictStr] = Field(default=None, description="Path is the container path to the artifact") + raw: Optional[IoArgoprojWorkflowV1alpha1RawArtifact] = None + recurse_mode: Optional[StrictBool] = Field(default=None, description="If mode is set, apply the permission recursively into the artifact if it is a folder", alias="recurseMode") + s3: Optional[IoArgoprojWorkflowV1alpha1S3Artifact] = None + sub_path: Optional[StrictStr] = Field(default=None, description="SubPath allows an artifact to be sourced from a subpath within the specified source", alias="subPath") + __properties: ClassVar[List[str]] = ["archive", "archiveLogs", "artifactGC", "artifactory", "azure", "deleted", "from", "fromExpression", "gcs", "git", "globalName", "hdfs", "http", "mode", "name", "optional", "oss", "path", "raw", "recurseMode", "s3", "subPath"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactPaths from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of archive + if self.archive: + _dict['archive'] = self.archive.to_dict() + # override the default output from pydantic by calling `to_dict()` of artifact_gc + if self.artifact_gc: + _dict['artifactGC'] = self.artifact_gc.to_dict() + # override the default output from pydantic by calling `to_dict()` of artifactory + if self.artifactory: + _dict['artifactory'] = self.artifactory.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure + if self.azure: + _dict['azure'] = self.azure.to_dict() + # override the default output from pydantic by calling `to_dict()` of gcs + if self.gcs: + _dict['gcs'] = self.gcs.to_dict() + # override the default output from pydantic by calling `to_dict()` of git + if self.git: + _dict['git'] = self.git.to_dict() + # override the default output from pydantic by calling `to_dict()` of hdfs + if self.hdfs: + _dict['hdfs'] = self.hdfs.to_dict() + # override the default output from pydantic by calling `to_dict()` of http + if self.http: + _dict['http'] = self.http.to_dict() + # override the default output from pydantic by calling `to_dict()` of oss + if self.oss: + _dict['oss'] = self.oss.to_dict() + # override the default output from pydantic by calling `to_dict()` of raw + if self.raw: + _dict['raw'] = self.raw.to_dict() + # override the default output from pydantic by calling `to_dict()` of s3 + if self.s3: + _dict['s3'] = self.s3.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create 
an instance of IoArgoprojWorkflowV1alpha1ArtifactPaths from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "archive": IoArgoprojWorkflowV1alpha1ArchiveStrategy.from_dict(obj["archive"]) if obj.get("archive") is not None else None, + "archiveLogs": obj.get("archiveLogs"), + "artifactGC": IoArgoprojWorkflowV1alpha1ArtifactGC.from_dict(obj["artifactGC"]) if obj.get("artifactGC") is not None else None, + "artifactory": IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.from_dict(obj["artifactory"]) if obj.get("artifactory") is not None else None, + "azure": IoArgoprojWorkflowV1alpha1AzureArtifact.from_dict(obj["azure"]) if obj.get("azure") is not None else None, + "deleted": obj.get("deleted"), + "from": obj.get("from"), + "fromExpression": obj.get("fromExpression"), + "gcs": IoArgoprojWorkflowV1alpha1GCSArtifact.from_dict(obj["gcs"]) if obj.get("gcs") is not None else None, + "git": IoArgoprojWorkflowV1alpha1GitArtifact.from_dict(obj["git"]) if obj.get("git") is not None else None, + "globalName": obj.get("globalName"), + "hdfs": IoArgoprojWorkflowV1alpha1HDFSArtifact.from_dict(obj["hdfs"]) if obj.get("hdfs") is not None else None, + "http": IoArgoprojWorkflowV1alpha1HTTPArtifact.from_dict(obj["http"]) if obj.get("http") is not None else None, + "mode": obj.get("mode"), + "name": obj.get("name"), + "optional": obj.get("optional"), + "oss": IoArgoprojWorkflowV1alpha1OSSArtifact.from_dict(obj["oss"]) if obj.get("oss") is not None else None, + "path": obj.get("path"), + "raw": IoArgoprojWorkflowV1alpha1RawArtifact.from_dict(obj["raw"]) if obj.get("raw") is not None else None, + "recurseMode": obj.get("recurseMode"), + "s3": IoArgoprojWorkflowV1alpha1S3Artifact.from_dict(obj["s3"]) if obj.get("s3") is not None else None, + "subPath": obj.get("subPath") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository.py 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository.py new file mode 100644 index 000000000000..f91fe23a5ed2 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact_repository import IoArgoprojWorkflowV1alpha1GCSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact_repository import IoArgoprojWorkflowV1alpha1S3ArtifactRepository +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactRepository(BaseModel): + """ + ArtifactRepository represents an artifact repository in which a controller will store its artifacts + """ 
# noqa: E501 + archive_logs: Optional[StrictBool] = Field(default=None, description="ArchiveLogs enables log archiving", alias="archiveLogs") + artifactory: Optional[IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository] = None + azure: Optional[IoArgoprojWorkflowV1alpha1AzureArtifactRepository] = None + gcs: Optional[IoArgoprojWorkflowV1alpha1GCSArtifactRepository] = None + hdfs: Optional[IoArgoprojWorkflowV1alpha1HDFSArtifactRepository] = None + oss: Optional[IoArgoprojWorkflowV1alpha1OSSArtifactRepository] = None + s3: Optional[IoArgoprojWorkflowV1alpha1S3ArtifactRepository] = None + __properties: ClassVar[List[str]] = ["archiveLogs", "artifactory", "azure", "gcs", "hdfs", "oss", "s3"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of artifactory + if self.artifactory: + _dict['artifactory'] = self.artifactory.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure + if self.azure: + _dict['azure'] = self.azure.to_dict() + # override the default output from pydantic by calling `to_dict()` of gcs + if self.gcs: + _dict['gcs'] = self.gcs.to_dict() + # override the default output from pydantic by calling `to_dict()` of hdfs + if self.hdfs: + _dict['hdfs'] = self.hdfs.to_dict() + # override the default output from pydantic by calling `to_dict()` of oss + if self.oss: + _dict['oss'] = self.oss.to_dict() + # override the default output from pydantic by calling `to_dict()` of s3 + if self.s3: + _dict['s3'] = self.s3.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "archiveLogs": obj.get("archiveLogs"), + "artifactory": IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.from_dict(obj["artifactory"]) if obj.get("artifactory") is not None else None, + "azure": IoArgoprojWorkflowV1alpha1AzureArtifactRepository.from_dict(obj["azure"]) if obj.get("azure") is not None else None, + "gcs": IoArgoprojWorkflowV1alpha1GCSArtifactRepository.from_dict(obj["gcs"]) if obj.get("gcs") is not None else None, + "hdfs": IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.from_dict(obj["hdfs"]) if obj.get("hdfs") is not None else None, + "oss": IoArgoprojWorkflowV1alpha1OSSArtifactRepository.from_dict(obj["oss"]) if obj.get("oss") is not None else None, + "s3": 
IoArgoprojWorkflowV1alpha1S3ArtifactRepository.from_dict(obj["s3"]) if obj.get("s3") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py new file mode 100644 index 000000000000..221b0975a574 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef + """ # noqa: E501 + config_map: Optional[StrictStr] = Field(default=None, description="The name of the config map. Defaults to \"artifact-repositories\".", alias="configMap") + key: Optional[StrictStr] = Field(default=None, description="The config map key. 
Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation.") + __properties: ClassVar[List[str]] = ["configMap", "key"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMap": obj.get("configMap"), + "key": obj.get("key") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py new file mode 100644 index 000000000000..aaccc3a11dbc --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactRepository +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus + """ # noqa: E501 + artifact_repository: Optional[IoArgoprojWorkflowV1alpha1ArtifactRepository] = Field(default=None, alias="artifactRepository") + config_map: Optional[StrictStr] = Field(default=None, description="The name of the config map. Defaults to \"artifact-repositories\".", alias="configMap") + default: Optional[StrictBool] = Field(default=None, description="If this ref represents the default artifact repository, rather than a config map.") + key: Optional[StrictStr] = Field(default=None, description="The config map key. Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation.") + namespace: Optional[StrictStr] = Field(default=None, description="The namespace of the config map. 
Defaults to the workflow's namespace, or the controller's namespace (if found).") + __properties: ClassVar[List[str]] = ["artifactRepository", "configMap", "default", "key", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of artifact_repository + if self.artifact_repository: + _dict['artifactRepository'] = self.artifact_repository.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifactRepository": IoArgoprojWorkflowV1alpha1ArtifactRepository.from_dict(obj["artifactRepository"]) if obj.get("artifactRepository") is not None else None, + "configMap": obj.get("configMap"), + "default": obj.get("default"), + "key": obj.get("key"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result.py new file mode 100644 index 000000000000..e676d339ca66 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactResult(BaseModel): + """ + ArtifactResult describes the result of attempting to delete a given Artifact + """ # noqa: E501 + error: Optional[StrictStr] = Field(default=None, description="Error is an optional error message which should be set if Success==false") + name: StrictStr = Field(description="Name is the name of the Artifact") + success: Optional[StrictBool] = Field(default=None, description="Success describes whether the deletion succeeded") + __properties: ClassVar[List[str]] = ["error", "name", "success"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": obj.get("error"), + "name": obj.get("name"), + "success": obj.get("success") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py new file mode 100644 index 000000000000..b06f4aec549d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifact_result_node_status.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus(BaseModel): + """ + ArtifactResultNodeStatus describes the result of the deletion on a given node + """ # noqa: E501 + artifact_results: Optional[Dict[str, IoArgoprojWorkflowV1alpha1ArtifactResult]] = Field(default=None, description="ArtifactResults maps Artifact name to result of the deletion", alias="artifactResults") + __properties: ClassVar[List[str]] = ["artifactResults"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in artifact_results (dict) + _field_dict = {} + if self.artifact_results: + for _key in self.artifact_results: + if self.artifact_results[_key]: + _field_dict[_key] = self.artifact_results[_key].to_dict() + _dict['artifactResults'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifactResults": dict( + (_k, IoArgoprojWorkflowV1alpha1ArtifactResult.from_dict(_v)) + for _k, _v in obj["artifactResults"].items() + ) + if obj.get("artifactResults") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact.py new file mode 100644 index 000000000000..be0872e80cfb --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactoryArtifact(BaseModel): + """ + ArtifactoryArtifact is the location of an artifactory artifact + """ # noqa: E501 + password_secret: Optional[SecretKeySelector] = Field(default=None, alias="passwordSecret") + url: StrictStr = Field(description="URL of the artifact") + username_secret: Optional[SecretKeySelector] = Field(default=None, alias="usernameSecret") + __properties: ClassVar[List[str]] = ["passwordSecret", "url", "usernameSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password_secret + if self.password_secret: + _dict['passwordSecret'] = self.password_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of username_secret + if self.username_secret: + _dict['usernameSecret'] = self.username_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "passwordSecret": SecretKeySelector.from_dict(obj["passwordSecret"]) if obj.get("passwordSecret") is not None else None, + "url": obj.get("url"), + "usernameSecret": SecretKeySelector.from_dict(obj["usernameSecret"]) if obj.get("usernameSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py new file mode 100644 index 000000000000..21ac22a4b074 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository(BaseModel): + """ + ArtifactoryArtifactRepository defines the controller configuration for an artifactory artifact repository + """ # noqa: E501 + key_format: Optional[StrictStr] = Field(default=None, description="KeyFormat defines the format of how to store keys and can reference workflow variables.", alias="keyFormat") + password_secret: Optional[SecretKeySelector] = Field(default=None, alias="passwordSecret") + repo_url: Optional[StrictStr] = Field(default=None, description="RepoURL is the url for artifactory repo.", alias="repoURL") + username_secret: Optional[SecretKeySelector] = Field(default=None, alias="usernameSecret") + __properties: ClassVar[List[str]] = ["keyFormat", "passwordSecret", "repoURL", "usernameSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password_secret + if self.password_secret: + _dict['passwordSecret'] = self.password_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of username_secret + if self.username_secret: + _dict['usernameSecret'] = self.username_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "keyFormat": obj.get("keyFormat"), + "passwordSecret": SecretKeySelector.from_dict(obj["passwordSecret"]) if obj.get("passwordSecret") is not None else None, + "repoURL": obj.get("repoURL"), + "usernameSecret": SecretKeySelector.from_dict(obj["usernameSecret"]) if obj.get("usernameSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact.py new file mode 100644 index 000000000000..060f9fd3c492 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1AzureArtifact(BaseModel): + """ + AzureArtifact is the location of a an Azure Storage artifact + """ # noqa: E501 + account_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="accountKeySecret") + blob: StrictStr = Field(description="Blob is the blob name (i.e., path) in the container where the artifact resides") + container: StrictStr = Field(description="Container is the container where resources will be stored") + endpoint: StrictStr = Field(description="Endpoint is the service url associated with an account. 
It is most likely \"https://.blob.core.windows.net\"") + use_sdk_creds: Optional[StrictBool] = Field(default=None, description="UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", alias="useSDKCreds") + __properties: ClassVar[List[str]] = ["accountKeySecret", "blob", "container", "endpoint", "useSDKCreds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1AzureArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of account_key_secret + if self.account_key_secret: + _dict['accountKeySecret'] = self.account_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1AzureArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accountKeySecret": SecretKeySelector.from_dict(obj["accountKeySecret"]) if obj.get("accountKeySecret") is not None else None, + "blob": obj.get("blob"), + "container": obj.get("container"), + "endpoint": obj.get("endpoint"), + "useSDKCreds": obj.get("useSDKCreds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py new file mode 100644 index 000000000000..494249bfca3f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_azure_artifact_repository.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1AzureArtifactRepository(BaseModel): + """ + AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository + """ # noqa: E501 + account_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="accountKeySecret") + blob_name_format: Optional[StrictStr] = Field(default=None, description="BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables", alias="blobNameFormat") + container: StrictStr = Field(description="Container is the container where resources will be stored") + endpoint: StrictStr = Field(description="Endpoint is the service url associated with an account. 
It is most likely \"https://.blob.core.windows.net\"") + use_sdk_creds: Optional[StrictBool] = Field(default=None, description="UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", alias="useSDKCreds") + __properties: ClassVar[List[str]] = ["accountKeySecret", "blobNameFormat", "container", "endpoint", "useSDKCreds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1AzureArtifactRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of account_key_secret + if self.account_key_secret: + _dict['accountKeySecret'] = self.account_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1AzureArtifactRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accountKeySecret": SecretKeySelector.from_dict(obj["accountKeySecret"]) if obj.get("accountKeySecret") is not None else None, + "blobNameFormat": obj.get("blobNameFormat"), + "container": obj.get("container"), + "endpoint": obj.get("endpoint"), + "useSDKCreds": obj.get("useSDKCreds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_backoff.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_backoff.py new file mode 100644 index 000000000000..42e4b8fc90a5 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_backoff.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Backoff(BaseModel): + """ + Backoff is a backoff strategy to use within retryStrategy + """ # noqa: E501 + duration: Optional[StrictStr] = Field(default=None, description="Duration is the amount to back off. Default unit is seconds, but could also be a duration (e.g. \"2m\", \"1h\")") + factor: Optional[StrictStr] = None + max_duration: Optional[StrictStr] = Field(default=None, description="MaxDuration is the maximum amount of time allowed for a workflow in the backoff strategy", alias="maxDuration") + __properties: ClassVar[List[str]] = ["duration", "factor", "maxDuration"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Backoff from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Backoff from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "duration": obj.get("duration"), + "factor": obj.get("factor"), + "maxDuration": obj.get("maxDuration") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_basic_auth.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_basic_auth.py new file mode 100644 index 000000000000..21f0764a80d9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_basic_auth.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1BasicAuth(BaseModel): + """ + BasicAuth describes the secret selectors required for basic authentication + """ # noqa: E501 + password_secret: Optional[SecretKeySelector] = Field(default=None, alias="passwordSecret") + username_secret: Optional[SecretKeySelector] = Field(default=None, alias="usernameSecret") + __properties: ClassVar[List[str]] = ["passwordSecret", "usernameSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1BasicAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password_secret + if self.password_secret: + _dict['passwordSecret'] = self.password_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of username_secret + if self.username_secret: + _dict['usernameSecret'] = self.username_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1BasicAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "passwordSecret": SecretKeySelector.from_dict(obj["passwordSecret"]) if obj.get("passwordSecret") is not None else None, + "usernameSecret": SecretKeySelector.from_dict(obj["usernameSecret"]) if obj.get("usernameSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cache.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cache.py new file mode 100644 index 000000000000..6582dc381b5e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cache.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Cache(BaseModel): + """ + Cache is the configuration for the type of cache to be used + """ # noqa: E501 + config_map: ConfigMapKeySelector = Field(alias="configMap") + __properties: ClassVar[List[str]] = ["configMap"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Cache from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config_map + if self.config_map: + _dict['configMap'] = self.config_map.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Cache from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMap": ConfigMapKeySelector.from_dict(obj["configMap"]) if obj.get("configMap") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_client_cert_auth.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_client_cert_auth.py new file mode 100644 index 000000000000..29df8e1010d6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_client_cert_auth.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ClientCertAuth(BaseModel): + """ + ClientCertAuth holds necessary information for client authentication via certificates + """ # noqa: E501 + client_cert_secret: Optional[SecretKeySelector] = Field(default=None, alias="clientCertSecret") + client_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="clientKeySecret") + __properties: ClassVar[List[str]] = ["clientCertSecret", "clientKeySecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClientCertAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of client_cert_secret + if self.client_cert_secret: + _dict['clientCertSecret'] = self.client_cert_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of client_key_secret + if self.client_key_secret: + _dict['clientKeySecret'] = self.client_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClientCertAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "clientCertSecret": SecretKeySelector.from_dict(obj["clientCertSecret"]) if obj.get("clientCertSecret") is not None else None, + "clientKeySecret": SecretKeySelector.from_dict(obj["clientKeySecret"]) if obj.get("clientKeySecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py new file mode 100644 index 000000000000..76ad497b2fef --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate(BaseModel): + """ + ClusterWorkflowTemplate is the definition of a workflow template resource in cluster scope + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ObjectMeta + spec: IoArgoprojWorkflowV1alpha1WorkflowSpec + __properties: ClassVar[List[str]] = ["apiVersion", "kind", "metadata", "spec"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "kind": obj.get("kind"), + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": IoArgoprojWorkflowV1alpha1WorkflowSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py new file mode 100644 index 000000000000..7ac4c5df15a1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest + """ # noqa: E501 + create_options: Optional[CreateOptions] = Field(default=None, alias="createOptions") + template: Optional[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate] = None + __properties: ClassVar[List[str]] = ["createOptions", "template"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of create_options + if self.create_options: + _dict['createOptions'] = self.create_options.to_dict() + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "createOptions": CreateOptions.from_dict(obj["createOptions"]) if obj.get("createOptions") is not None else None, + "template": IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.from_dict(obj["template"]) if obj.get("template") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py new file mode 100644 index 000000000000..f77a4b607207 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest + """ # noqa: E501 + create_options: Optional[CreateOptions] = Field(default=None, alias="createOptions") + template: Optional[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate] = None + __properties: ClassVar[List[str]] = ["createOptions", "template"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of create_options + if self.create_options: + _dict['createOptions'] = self.create_options.to_dict() + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "createOptions": CreateOptions.from_dict(obj["createOptions"]) if obj.get("createOptions") is not None else None, + "template": IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.from_dict(obj["template"]) if obj.get("template") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py new file mode 100644 index 000000000000..706c9945df2e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.list_meta import ListMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList(BaseModel): + """ + ClusterWorkflowTemplateList is list of ClusterWorkflowTemplate resources + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + items: List[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate] + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ListMeta + __properties: ClassVar[List[str]] = ["apiVersion", "items", "kind", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "items": [IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "kind": obj.get("kind"), + "metadata": ListMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py new file mode 100644 index 000000000000..85c4ebc8481b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
class IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest(BaseModel):
    """IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest."""  # noqa: E501

    name: Optional[StrictStr] = Field(default=None, description="DEPRECATED: This field is ignored.")
    template: Optional[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate] = None
    __properties: ClassVar[List[str]] = ["name", "template"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model using its alias-keyed dict form."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Deserialize a JSON string into an IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Differs from pydantic's ``self.model_dump(by_alias=True)`` in that
        ``None`` only appears for nullable fields that were set explicitly
        at model initialization; other ``None``-valued fields are dropped.
        """
        excluded_fields: Set[str] = set()

        payload = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Re-serialize the nested template via its own to_dict() override.
        if self.template:
            payload['template'] = self.template.to_dict()
        return payload

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build an IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest from a plain dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "name": obj.get("name"),
            "template": IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.from_dict(obj["template"]) if obj.get("template") is not None else None
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CollectEventRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1CollectEventRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CollectEventRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CollectEventRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_column.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_column.py new file mode 100644 index 000000000000..06a1cb722ad6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_column.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Column(BaseModel): + """ + Column is a custom column that will be exposed in the Workflow List View. 
+ """ # noqa: E501 + key: StrictStr = Field(description="The key of the label or annotation, e.g., \"workflows.argoproj.io/completed\".") + name: StrictStr = Field(description="The name of this column, e.g., \"Workflow Completed\".") + type: StrictStr = Field(description="The type of this column, \"label\" or \"annotation\".") + __properties: ClassVar[List[str]] = ["key", "name", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Column from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Column from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "name": obj.get("name"), + "type": obj.get("type") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_condition.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_condition.py new file mode 100644 index 000000000000..41846dded6f5 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_condition.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Condition(BaseModel): + """ + IoArgoprojWorkflowV1alpha1Condition + """ # noqa: E501 + message: Optional[StrictStr] = Field(default=None, description="Message is the condition message") + status: Optional[StrictStr] = Field(default=None, description="Status is the status of the condition") + type: Optional[StrictStr] = Field(default=None, description="Type is the type of condition") + __properties: ClassVar[List[str]] = ["message", "status", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Condition from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Condition from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "message": obj.get("message"), + "status": obj.get("status"), + "type": obj.get("type") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_node.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_node.py new file mode 100644 index 000000000000..901853ed6b99 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_node.py @@ -0,0 +1,193 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.container_port import ContainerPort +from argo_workflows.models.env_from_source import EnvFromSource +from argo_workflows.models.env_var import EnvVar +from argo_workflows.models.lifecycle import Lifecycle +from argo_workflows.models.probe import Probe +from argo_workflows.models.resource_requirements import ResourceRequirements +from argo_workflows.models.security_context import SecurityContext +from argo_workflows.models.volume_device import VolumeDevice +from argo_workflows.models.volume_mount import VolumeMount +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ContainerNode(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ContainerNode + """ # noqa: E501 + args: Optional[List[StrictStr]] = Field(default=None, description="Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell") + command: Optional[List[StrictStr]] = Field(default=None, description="Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. 
If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell") + dependencies: Optional[List[StrictStr]] = None + env: Optional[List[EnvVar]] = Field(default=None, description="List of environment variables to set in the container. Cannot be updated.") + env_from: Optional[List[EnvFromSource]] = Field(default=None, description="List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.", alias="envFrom") + image: Optional[StrictStr] = Field(default=None, description="Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.") + image_pull_policy: Optional[StrictStr] = Field(default=None, description="Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/containers/images#updating-images", alias="imagePullPolicy") + lifecycle: Optional[Lifecycle] = None + liveness_probe: Optional[Probe] = Field(default=None, alias="livenessProbe") + name: StrictStr = Field(description="Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.") + ports: Optional[List[ContainerPort]] = Field(default=None, description="List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.") + readiness_probe: Optional[Probe] = Field(default=None, alias="readinessProbe") + resources: Optional[ResourceRequirements] = None + security_context: Optional[SecurityContext] = Field(default=None, alias="securityContext") + startup_probe: Optional[Probe] = Field(default=None, alias="startupProbe") + stdin: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.") + stdin_once: Optional[StrictBool] = Field(default=None, description="Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. 
If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false", alias="stdinOnce") + termination_message_path: Optional[StrictStr] = Field(default=None, description="Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.", alias="terminationMessagePath") + termination_message_policy: Optional[StrictStr] = Field(default=None, description="Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.", alias="terminationMessagePolicy") + tty: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.") + volume_devices: Optional[List[VolumeDevice]] = Field(default=None, description="volumeDevices is the list of block devices to be used by the container.", alias="volumeDevices") + volume_mounts: Optional[List[VolumeMount]] = Field(default=None, description="Pod volumes to mount into the container's filesystem. Cannot be updated.", alias="volumeMounts") + working_dir: Optional[StrictStr] = Field(default=None, description="Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. 
Cannot be updated.", alias="workingDir") + __properties: ClassVar[List[str]] = ["args", "command", "dependencies", "env", "envFrom", "image", "imagePullPolicy", "lifecycle", "livenessProbe", "name", "ports", "readinessProbe", "resources", "securityContext", "startupProbe", "stdin", "stdinOnce", "terminationMessagePath", "terminationMessagePolicy", "tty", "volumeDevices", "volumeMounts", "workingDir"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContainerNode from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in env (list) + _items = [] + if self.env: + for _item in self.env: + if _item: + _items.append(_item.to_dict()) + _dict['env'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in env_from (list) + _items = [] + if self.env_from: + for _item in self.env_from: + if _item: + _items.append(_item.to_dict()) + _dict['envFrom'] = _items + # override the default output from pydantic by calling `to_dict()` of lifecycle + if self.lifecycle: + _dict['lifecycle'] = self.lifecycle.to_dict() + # override the default output from pydantic by calling `to_dict()` of liveness_probe + if self.liveness_probe: + _dict['livenessProbe'] = self.liveness_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in ports (list) + _items = [] + if self.ports: + for _item in self.ports: + if _item: + _items.append(_item.to_dict()) + _dict['ports'] = _items + # override the default output from pydantic by calling `to_dict()` of readiness_probe + if self.readiness_probe: + _dict['readinessProbe'] = self.readiness_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of resources + if self.resources: + _dict['resources'] = self.resources.to_dict() + # override the default output from pydantic by calling `to_dict()` of security_context + if self.security_context: + _dict['securityContext'] = self.security_context.to_dict() + # override the default output from pydantic by calling `to_dict()` of startup_probe + if self.startup_probe: + _dict['startupProbe'] = self.startup_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in volume_devices (list) + _items = [] + if self.volume_devices: + for _item in 
self.volume_devices: + if _item: + _items.append(_item.to_dict()) + _dict['volumeDevices'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in volume_mounts (list) + _items = [] + if self.volume_mounts: + for _item in self.volume_mounts: + if _item: + _items.append(_item.to_dict()) + _dict['volumeMounts'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContainerNode from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "args": obj.get("args"), + "command": obj.get("command"), + "dependencies": obj.get("dependencies"), + "env": [EnvVar.from_dict(_item) for _item in obj["env"]] if obj.get("env") is not None else None, + "envFrom": [EnvFromSource.from_dict(_item) for _item in obj["envFrom"]] if obj.get("envFrom") is not None else None, + "image": obj.get("image"), + "imagePullPolicy": obj.get("imagePullPolicy"), + "lifecycle": Lifecycle.from_dict(obj["lifecycle"]) if obj.get("lifecycle") is not None else None, + "livenessProbe": Probe.from_dict(obj["livenessProbe"]) if obj.get("livenessProbe") is not None else None, + "name": obj.get("name"), + "ports": [ContainerPort.from_dict(_item) for _item in obj["ports"]] if obj.get("ports") is not None else None, + "readinessProbe": Probe.from_dict(obj["readinessProbe"]) if obj.get("readinessProbe") is not None else None, + "resources": ResourceRequirements.from_dict(obj["resources"]) if obj.get("resources") is not None else None, + "securityContext": SecurityContext.from_dict(obj["securityContext"]) if obj.get("securityContext") is not None else None, + "startupProbe": Probe.from_dict(obj["startupProbe"]) if obj.get("startupProbe") is not None else None, + "stdin": obj.get("stdin"), + "stdinOnce": obj.get("stdinOnce"), + "terminationMessagePath": 
obj.get("terminationMessagePath"), + "terminationMessagePolicy": obj.get("terminationMessagePolicy"), + "tty": obj.get("tty"), + "volumeDevices": [VolumeDevice.from_dict(_item) for _item in obj["volumeDevices"]] if obj.get("volumeDevices") is not None else None, + "volumeMounts": [VolumeMount.from_dict(_item) for _item in obj["volumeMounts"]] if obj.get("volumeMounts") is not None else None, + "workingDir": obj.get("workingDir") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py new file mode 100644 index 000000000000..e81ec9fac50a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_retry_strategy.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy + """ # noqa: E501 + duration: Optional[StrictStr] = Field(default=None, description="Duration is the time between each retry, examples values are \"300ms\", \"1s\" or \"5m\". 
Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\", \"m\", \"h\".") + retries: StrictStr + __properties: ClassVar[List[str]] = ["duration", "retries"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "duration": obj.get("duration"), + "retries": obj.get("retries") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_template.py new file mode 100644 index 000000000000..e3cef836dc1f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_container_set_template.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_node import IoArgoprojWorkflowV1alpha1ContainerNode +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_retry_strategy import IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy +from argo_workflows.models.volume_mount import VolumeMount +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ContainerSetTemplate(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ContainerSetTemplate + """ # noqa: E501 + containers: List[IoArgoprojWorkflowV1alpha1ContainerNode] + retry_strategy: Optional[IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy] = Field(default=None, alias="retryStrategy") + volume_mounts: Optional[List[VolumeMount]] = Field(default=None, alias="volumeMounts") + __properties: ClassVar[List[str]] = ["containers", "retryStrategy", "volumeMounts"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContainerSetTemplate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in containers (list) + _items = [] + if self.containers: + for _item in self.containers: + if _item: + _items.append(_item.to_dict()) + _dict['containers'] = _items + # override the default output from pydantic by calling `to_dict()` of retry_strategy + if self.retry_strategy: + _dict['retryStrategy'] = self.retry_strategy.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in volume_mounts (list) + _items = [] + if self.volume_mounts: + for _item in self.volume_mounts: + if _item: + _items.append(_item.to_dict()) + _dict['volumeMounts'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContainerSetTemplate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "containers": [IoArgoprojWorkflowV1alpha1ContainerNode.from_dict(_item) for _item in obj["containers"]] if obj.get("containers") is not None else None, + "retryStrategy": IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.from_dict(obj["retryStrategy"]) if obj.get("retryStrategy") is not None else None, + "volumeMounts": [VolumeMount.from_dict(_item) for _item in obj["volumeMounts"]] if obj.get("volumeMounts") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_continue_on.py 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_continue_on.py new file mode 100644 index 000000000000..0073d69f7dc7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_continue_on.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ContinueOn(BaseModel): + """ + ContinueOn defines if a workflow should continue even if a task or step fails/errors. It can be specified if the workflow should continue when the pod errors, fails or both. 
+ """ # noqa: E501 + error: Optional[StrictBool] = None + failed: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["error", "failed"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContinueOn from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ContinueOn from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": obj.get("error"), + "failed": obj.get("failed") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_counter.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_counter.py new file mode 100644 index 000000000000..b1281c5954d4 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_counter.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Counter(BaseModel): + """ + Counter is a Counter prometheus metric + """ # noqa: E501 + value: StrictStr = Field(description="Value is the value of the metric") + __properties: ClassVar[List[str]] = ["value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Counter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Counter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py new file mode 100644 index 000000000000..55ebf34249ad --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_cron_workflow_request.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest + """ # noqa: E501 + create_options: Optional[CreateOptions] = Field(default=None, alias="createOptions") + cron_workflow: Optional[IoArgoprojWorkflowV1alpha1CronWorkflow] = Field(default=None, alias="cronWorkflow") + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["createOptions", "cronWorkflow", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of create_options + if self.create_options: + _dict['createOptions'] = self.create_options.to_dict() + # override the default output from pydantic by calling `to_dict()` of cron_workflow + if self.cron_workflow: + _dict['cronWorkflow'] = self.cron_workflow.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "createOptions": CreateOptions.from_dict(obj["createOptions"]) if obj.get("createOptions") is not None else None, + "cronWorkflow": IoArgoprojWorkflowV1alpha1CronWorkflow.from_dict(obj["cronWorkflow"]) if obj.get("cronWorkflow") is not None else None, + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py new file mode 100644 index 000000000000..904e8d5519d3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_create_s3_bucket_options.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CreateS3BucketOptions(BaseModel): + """ + CreateS3BucketOptions options used to determine automatic automatic bucket-creation process + """ # noqa: E501 + object_locking: Optional[StrictBool] = Field(default=None, description="ObjectLocking Enable object locking", alias="objectLocking") + __properties: ClassVar[List[str]] = ["objectLocking"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CreateS3BucketOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CreateS3BucketOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "objectLocking": obj.get("objectLocking") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow.py new file mode 100644 index 000000000000..a459c5aa7a24 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_spec import IoArgoprojWorkflowV1alpha1CronWorkflowSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_status import IoArgoprojWorkflowV1alpha1CronWorkflowStatus +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CronWorkflow(BaseModel): + """ + CronWorkflow is the definition of a scheduled workflow resource + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ObjectMeta + spec: IoArgoprojWorkflowV1alpha1CronWorkflowSpec + status: Optional[IoArgoprojWorkflowV1alpha1CronWorkflowStatus] = None + __properties: ClassVar[List[str]] = ["apiVersion", "kind", "metadata", "spec", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflow from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflow from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "kind": obj.get("kind"), + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": IoArgoprojWorkflowV1alpha1CronWorkflowSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None, + "status": IoArgoprojWorkflowV1alpha1CronWorkflowStatus.from_dict(obj["status"]) if obj.get("status") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_list.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_list.py new file mode 100644 index 000000000000..637e1fca8a4a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_list.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.list_meta import ListMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CronWorkflowList(BaseModel): + """ + CronWorkflowList is list of CronWorkflow resources + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + items: List[IoArgoprojWorkflowV1alpha1CronWorkflow] + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ListMeta + __properties: ClassVar[List[str]] = ["apiVersion", "items", "kind", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "items": [IoArgoprojWorkflowV1alpha1CronWorkflow.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "kind": obj.get("kind"), + "metadata": ListMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py new file mode 100644 index 000000000000..9c2cd147c2cb --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py new file mode 100644 index 000000000000..534ae8c92967 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_spec.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_stop_strategy import IoArgoprojWorkflowV1alpha1StopStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CronWorkflowSpec(BaseModel): + """ + CronWorkflowSpec is the specification of a CronWorkflow + """ # noqa: E501 + concurrency_policy: Optional[StrictStr] = Field(default=None, description="ConcurrencyPolicy is the K8s-style concurrency policy that will be used", alias="concurrencyPolicy") + failed_jobs_history_limit: Optional[StrictInt] = Field(default=None, description="FailedJobsHistoryLimit is the number of failed jobs to be kept at a time", alias="failedJobsHistoryLimit") + schedule: StrictStr = Field(description="Schedule is a schedule to run the Workflow in Cron format") + schedules: Optional[List[StrictStr]] = Field(default=None, description="Schedules is a list of schedules to run the Workflow in Cron format") + starting_deadline_seconds: Optional[StrictInt] = Field(default=None, description="StartingDeadlineSeconds is the K8s-style deadline that will limit the time a CronWorkflow will be run after its original scheduled time if it is missed.", alias="startingDeadlineSeconds") + stop_strategy: Optional[IoArgoprojWorkflowV1alpha1StopStrategy] = Field(default=None, alias="stopStrategy") + successful_jobs_history_limit: Optional[StrictInt] = Field(default=None, description="SuccessfulJobsHistoryLimit is the number of successful jobs to be kept at a time", alias="successfulJobsHistoryLimit") + suspend: 
Optional[StrictBool] = Field(default=None, description="Suspend is a flag that will stop new CronWorkflows from running if set to true") + timezone: Optional[StrictStr] = Field(default=None, description="Timezone is the timezone against which the cron schedule will be calculated, e.g. \"Asia/Tokyo\". Default is machine's local time.") + workflow_metadata: Optional[ObjectMeta] = Field(default=None, alias="workflowMetadata") + workflow_spec: IoArgoprojWorkflowV1alpha1WorkflowSpec = Field(alias="workflowSpec") + __properties: ClassVar[List[str]] = ["concurrencyPolicy", "failedJobsHistoryLimit", "schedule", "schedules", "startingDeadlineSeconds", "stopStrategy", "successfulJobsHistoryLimit", "suspend", "timezone", "workflowMetadata", "workflowSpec"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of stop_strategy + if self.stop_strategy: + _dict['stopStrategy'] = self.stop_strategy.to_dict() + # override the default output from pydantic by calling `to_dict()` of workflow_metadata + if self.workflow_metadata: + _dict['workflowMetadata'] = self.workflow_metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of workflow_spec + if self.workflow_spec: + _dict['workflowSpec'] = self.workflow_spec.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "concurrencyPolicy": obj.get("concurrencyPolicy"), + "failedJobsHistoryLimit": obj.get("failedJobsHistoryLimit"), + "schedule": obj.get("schedule"), + "schedules": obj.get("schedules"), + "startingDeadlineSeconds": obj.get("startingDeadlineSeconds"), + "stopStrategy": IoArgoprojWorkflowV1alpha1StopStrategy.from_dict(obj["stopStrategy"]) if obj.get("stopStrategy") is not None else None, + "successfulJobsHistoryLimit": obj.get("successfulJobsHistoryLimit"), + "suspend": obj.get("suspend"), + "timezone": obj.get("timezone"), + "workflowMetadata": ObjectMeta.from_dict(obj["workflowMetadata"]) if obj.get("workflowMetadata") is not None else None, + "workflowSpec": IoArgoprojWorkflowV1alpha1WorkflowSpec.from_dict(obj["workflowSpec"]) if obj.get("workflowSpec") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_status.py new 
file mode 100644 index 000000000000..0ab79aebfb67 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_status.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition +from argo_workflows.models.object_reference import ObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CronWorkflowStatus(BaseModel): + """ + CronWorkflowStatus is the status of a CronWorkflow + """ # noqa: E501 + active: List[ObjectReference] = Field(description="Active is a list of active workflows stemming from this CronWorkflow") + conditions: List[IoArgoprojWorkflowV1alpha1Condition] = Field(description="Conditions is a list of conditions the CronWorkflow may have") + failed: StrictInt = Field(description="Failed is a counter of how many times a child workflow terminated in failed or errored state") + last_scheduled_time: datetime = Field(description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="lastScheduledTime") + phase: StrictStr = Field(description="Phase defines the cron workflow phase. 
It is changed to Stopped when the stopping condition is achieved which stops new CronWorkflows from running") + succeeded: StrictInt = Field(description="Succeeded is a counter of how many times the child workflows had success") + __properties: ClassVar[List[str]] = ["active", "conditions", "failed", "lastScheduledTime", "phase", "succeeded"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in active (list) + _items = [] + if self.active: + for _item in self.active: + if _item: + _items.append(_item.to_dict()) + _dict['active'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in conditions (list) + _items = [] + if self.conditions: + for _item in self.conditions: + if _item: + _items.append(_item.to_dict()) + _dict['conditions'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "active": [ObjectReference.from_dict(_item) for _item in obj["active"]] if obj.get("active") is not None else None, + "conditions": [IoArgoprojWorkflowV1alpha1Condition.from_dict(_item) for _item in obj["conditions"]] if obj.get("conditions") is not None else None, + "failed": obj.get("failed"), + "lastScheduledTime": obj.get("lastScheduledTime"), + "phase": obj.get("phase"), + "succeeded": obj.get("succeeded") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py new file mode 100644 index 000000000000..3ca6605a7fef --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_task.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_task.py new file mode 100644 index 000000000000..31e5499e6147 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_task.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.models.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook +from argo_workflows.models.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1DAGTask(BaseModel): + """ + DAGTask represents a node in the graph during DAG execution + """ # noqa: E501 + arguments: Optional[IoArgoprojWorkflowV1alpha1Arguments] = None + continue_on: Optional[IoArgoprojWorkflowV1alpha1ContinueOn] = Field(default=None, alias="continueOn") + dependencies: Optional[List[StrictStr]] = Field(default=None, description="Dependencies are name of other targets which this depends on") + depends: Optional[StrictStr] = Field(default=None, description="Depends are name of other targets which this depends on") + hooks: Optional[Dict[str, IoArgoprojWorkflowV1alpha1LifecycleHook]] = Field(default=None, description="Hooks hold the lifecycle hook which is invoked at lifecycle of task, irrespective of the success, failure, or error status of the primary task") + inline: Optional[IoArgoprojWorkflowV1alpha1Template] = None + name: StrictStr = Field(description="Name is the name of the target") + on_exit: Optional[StrictStr] = Field(default=None, description="OnExit is a template reference which is invoked at the end of the template, irrespective of the 
success, failure, or error of the primary template. DEPRECATED: Use Hooks[exit].Template instead.", alias="onExit") + template: Optional[StrictStr] = Field(default=None, description="Name of template to execute") + template_ref: Optional[IoArgoprojWorkflowV1alpha1TemplateRef] = Field(default=None, alias="templateRef") + when: Optional[StrictStr] = Field(default=None, description="When is an expression in which the task should conditionally execute") + with_items: Optional[List[Dict[str, Any]]] = Field(default=None, description="WithItems expands a task into multiple parallel tasks from the items in the list", alias="withItems") + with_param: Optional[StrictStr] = Field(default=None, description="WithParam expands a task into multiple parallel tasks from the value in the parameter, which is expected to be a JSON list.", alias="withParam") + with_sequence: Optional[IoArgoprojWorkflowV1alpha1Sequence] = Field(default=None, alias="withSequence") + __properties: ClassVar[List[str]] = ["arguments", "continueOn", "dependencies", "depends", "hooks", "inline", "name", "onExit", "template", "templateRef", "when", "withItems", "withParam", "withSequence"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1DAGTask from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of arguments + if self.arguments: + _dict['arguments'] = self.arguments.to_dict() + # override the default output from pydantic by calling `to_dict()` of continue_on + if self.continue_on: + _dict['continueOn'] = self.continue_on.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in hooks (dict) + _field_dict = {} + if self.hooks: + for _key in self.hooks: + if self.hooks[_key]: + _field_dict[_key] = self.hooks[_key].to_dict() + _dict['hooks'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of inline + if self.inline: + _dict['inline'] = self.inline.to_dict() + # override the default output from pydantic by calling `to_dict()` of template_ref + if self.template_ref: + _dict['templateRef'] = self.template_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of with_sequence + if self.with_sequence: + _dict['withSequence'] = self.with_sequence.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1DAGTask from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "arguments": IoArgoprojWorkflowV1alpha1Arguments.from_dict(obj["arguments"]) if obj.get("arguments") is not None else None, + "continueOn": IoArgoprojWorkflowV1alpha1ContinueOn.from_dict(obj["continueOn"]) if obj.get("continueOn") is not None else 
None, + "dependencies": obj.get("dependencies"), + "depends": obj.get("depends"), + "hooks": dict( + (_k, IoArgoprojWorkflowV1alpha1LifecycleHook.from_dict(_v)) + for _k, _v in obj["hooks"].items() + ) + if obj.get("hooks") is not None + else None, + "inline": IoArgoprojWorkflowV1alpha1Template.from_dict(obj["inline"]) if obj.get("inline") is not None else None, + "name": obj.get("name"), + "onExit": obj.get("onExit"), + "template": obj.get("template"), + "templateRef": IoArgoprojWorkflowV1alpha1TemplateRef.from_dict(obj["templateRef"]) if obj.get("templateRef") is not None else None, + "when": obj.get("when"), + "withItems": obj.get("withItems"), + "withParam": obj.get("withParam"), + "withSequence": IoArgoprojWorkflowV1alpha1Sequence.from_dict(obj["withSequence"]) if obj.get("withSequence") is not None else None + }) + return _obj + +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template +# TODO: Rewrite to not use raise_errors +IoArgoprojWorkflowV1alpha1DAGTask.model_rebuild(raise_errors=False) + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_template.py new file mode 100644 index 000000000000..55b3396195f4 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_dag_template.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1DAGTemplate(BaseModel): + """ + DAGTemplate is a template subtype for directed acyclic graph templates + """ # noqa: E501 + fail_fast: Optional[StrictBool] = Field(default=None, description="This flag is for DAG logic. The DAG logic has a built-in \"fail fast\" feature to stop scheduling new steps, as soon as it detects that one of the DAG nodes is failed. Then it waits until all DAG nodes are completed before failing the DAG itself. The FailFast flag default is true, if set to false, it will allow a DAG to run all branches of the DAG to completion (either success or failure), regardless of the failed outcomes of branches in the DAG. More info and example about this feature at https://github.com/argoproj/argo-workflows/issues/1442", alias="failFast") + target: Optional[StrictStr] = Field(default=None, description="Target are one or more names of targets to execute in a DAG") + tasks: List[IoArgoprojWorkflowV1alpha1DAGTask] = Field(description="Tasks are a list of DAG tasks") + __properties: ClassVar[List[str]] = ["failFast", "target", "tasks"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of 
IoArgoprojWorkflowV1alpha1DAGTemplate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in tasks (list) + _items = [] + if self.tasks: + for _item in self.tasks: + if _item: + _items.append(_item.to_dict()) + _dict['tasks'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1DAGTemplate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "failFast": obj.get("failFast"), + "target": obj.get("target"), + "tasks": [IoArgoprojWorkflowV1alpha1DAGTask.from_dict(_item) for _item in obj["tasks"]] if obj.get("tasks") is not None else None + }) + return _obj + +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_task import IoArgoprojWorkflowV1alpha1DAGTask +# TODO: Rewrite to not use raise_errors +IoArgoprojWorkflowV1alpha1DAGTemplate.model_rebuild(raise_errors=False) + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data.py new file mode 100644 index 000000000000..8bdb82b9aaea --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo 
Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data_source import IoArgoprojWorkflowV1alpha1DataSource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_transformation_step import IoArgoprojWorkflowV1alpha1TransformationStep +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Data(BaseModel): + """ + Data is a data template + """ # noqa: E501 + source: IoArgoprojWorkflowV1alpha1DataSource + transformation: List[IoArgoprojWorkflowV1alpha1TransformationStep] = Field(description="Transformation applies a set of transformations") + __properties: ClassVar[List[str]] = ["source", "transformation"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Data from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the 
model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of source + if self.source: + _dict['source'] = self.source.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in transformation (list) + _items = [] + if self.transformation: + for _item in self.transformation: + if _item: + _items.append(_item.to_dict()) + _dict['transformation'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Data from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "source": IoArgoprojWorkflowV1alpha1DataSource.from_dict(obj["source"]) if obj.get("source") is not None else None, + "transformation": [IoArgoprojWorkflowV1alpha1TransformationStep.from_dict(_item) for _item in obj["transformation"]] if obj.get("transformation") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data_source.py new file mode 100644 index 000000000000..9af29d248c20 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_data_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_paths import IoArgoprojWorkflowV1alpha1ArtifactPaths +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1DataSource(BaseModel): + """ + DataSource sources external data into a data template + """ # noqa: E501 + artifact_paths: Optional[IoArgoprojWorkflowV1alpha1ArtifactPaths] = Field(default=None, alias="artifactPaths") + __properties: ClassVar[List[str]] = ["artifactPaths"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1DataSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of artifact_paths + if self.artifact_paths: + _dict['artifactPaths'] = self.artifact_paths.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1DataSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifactPaths": IoArgoprojWorkflowV1alpha1ArtifactPaths.from_dict(obj["artifactPaths"]) if obj.get("artifactPaths") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_event.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_event.py new file mode 100644 index 000000000000..aa59af7f2d4a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_event.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Event(BaseModel): + """ + IoArgoprojWorkflowV1alpha1Event + """ # noqa: E501 + selector: StrictStr = Field(description="Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"`") + __properties: ClassVar[List[str]] = ["selector"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Event from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Event from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "selector": obj.get("selector") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_executor_config.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_executor_config.py new file mode 100644 index 000000000000..90aa0fd61d14 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_executor_config.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ExecutorConfig(BaseModel): + """ + ExecutorConfig holds configurations of an executor container. 
+ """ # noqa: E501 + service_account_name: Optional[StrictStr] = Field(default=None, description="ServiceAccountName specifies the service account name of the executor container.", alias="serviceAccountName") + __properties: ClassVar[List[str]] = ["serviceAccountName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ExecutorConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ExecutorConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "serviceAccountName": obj.get("serviceAccountName") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gauge.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gauge.py new file mode 100644 index 000000000000..e8550193dcf2 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gauge.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Gauge(BaseModel): + """ + Gauge is a Gauge prometheus metric + """ # noqa: E501 + operation: Optional[StrictStr] = Field(default=None, description="Operation defines the operation to apply with value and the metrics' current value") + realtime: StrictBool = Field(description="Realtime emits this metric in real time if applicable") + value: StrictStr = Field(description="Value is the value to be used in the operation with the metric's current value. If no operation is set, value is the value of the metric") + __properties: ClassVar[List[str]] = ["operation", "realtime", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Gauge from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Gauge from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "operation": obj.get("operation"), + "realtime": obj.get("realtime"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact.py new file mode 100644 index 000000000000..66e147800fb5 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1GCSArtifact(BaseModel): + """ + GCSArtifact is the location of a GCS artifact + """ # noqa: E501 + bucket: Optional[StrictStr] = Field(default=None, description="Bucket is the name of the bucket") + key: StrictStr = Field(description="Key is the path in the bucket where the artifact resides") + service_account_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="serviceAccountKeySecret") + __properties: ClassVar[List[str]] = ["bucket", "key", "serviceAccountKeySecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GCSArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of service_account_key_secret + if self.service_account_key_secret: + _dict['serviceAccountKeySecret'] = self.service_account_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GCSArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "bucket": obj.get("bucket"), + "key": obj.get("key"), + "serviceAccountKeySecret": SecretKeySelector.from_dict(obj["serviceAccountKeySecret"]) if obj.get("serviceAccountKeySecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py new file mode 100644 index 000000000000..e720dd0cd33e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_gcs_artifact_repository.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1GCSArtifactRepository(BaseModel): + """ + GCSArtifactRepository defines the controller configuration for a GCS artifact repository + """ # noqa: E501 + bucket: Optional[StrictStr] = Field(default=None, description="Bucket is the name of the bucket") + key_format: Optional[StrictStr] = Field(default=None, description="KeyFormat defines the format of how to store keys and can reference workflow variables.", alias="keyFormat") + service_account_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="serviceAccountKeySecret") + __properties: ClassVar[List[str]] = ["bucket", "keyFormat", "serviceAccountKeySecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GCSArtifactRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of service_account_key_secret + if self.service_account_key_secret: + _dict['serviceAccountKeySecret'] = self.service_account_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GCSArtifactRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "bucket": obj.get("bucket"), + "keyFormat": obj.get("keyFormat"), + "serviceAccountKeySecret": SecretKeySelector.from_dict(obj["serviceAccountKeySecret"]) if obj.get("serviceAccountKeySecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_get_user_info_response.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_get_user_info_response.py new file mode 100644 index 000000000000..6039ddec3e80 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_get_user_info_response.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1GetUserInfoResponse(BaseModel): + """ + IoArgoprojWorkflowV1alpha1GetUserInfoResponse + """ # noqa: E501 + email: Optional[StrictStr] = None + email_verified: Optional[StrictBool] = Field(default=None, alias="emailVerified") + groups: Optional[List[StrictStr]] = None + issuer: Optional[StrictStr] = None + name: Optional[StrictStr] = None + service_account_name: Optional[StrictStr] = Field(default=None, alias="serviceAccountName") + service_account_namespace: Optional[StrictStr] = Field(default=None, alias="serviceAccountNamespace") + subject: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["email", "emailVerified", "groups", "issuer", "name", "serviceAccountName", "serviceAccountNamespace", "subject"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GetUserInfoResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GetUserInfoResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "email": obj.get("email"), + "emailVerified": obj.get("emailVerified"), + "groups": obj.get("groups"), + "issuer": obj.get("issuer"), + "name": obj.get("name"), + "serviceAccountName": obj.get("serviceAccountName"), + "serviceAccountNamespace": obj.get("serviceAccountNamespace"), + "subject": obj.get("subject") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_git_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_git_artifact.py new file mode 100644 index 000000000000..38054f1f3bfa --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_git_artifact.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1GitArtifact(BaseModel): + """ + GitArtifact is the location of an git artifact + """ # noqa: E501 + branch: Optional[StrictStr] = Field(default=None, description="Branch is the branch to fetch when `SingleBranch` is enabled") + depth: Optional[StrictInt] = Field(default=None, description="Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip") + disable_submodules: Optional[StrictBool] = Field(default=None, description="DisableSubmodules disables submodules during git clone", alias="disableSubmodules") + fetch: Optional[List[StrictStr]] = Field(default=None, description="Fetch specifies a number of refs that should be fetched before checkout") + insecure_ignore_host_key: Optional[StrictBool] = Field(default=None, description="InsecureIgnoreHostKey disables SSH strict host key checking during git clone", alias="insecureIgnoreHostKey") + password_secret: Optional[SecretKeySelector] = Field(default=None, alias="passwordSecret") + repo: StrictStr = Field(description="Repo is the git repository") + revision: Optional[StrictStr] = Field(default=None, description="Revision is the git commit, tag, branch to checkout") + single_branch: Optional[StrictBool] = Field(default=None, description="SingleBranch enables single branch clone, using the `branch` parameter", alias="singleBranch") + ssh_private_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="sshPrivateKeySecret") + username_secret: Optional[SecretKeySelector] = Field(default=None, alias="usernameSecret") + __properties: 
ClassVar[List[str]] = ["branch", "depth", "disableSubmodules", "fetch", "insecureIgnoreHostKey", "passwordSecret", "repo", "revision", "singleBranch", "sshPrivateKeySecret", "usernameSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GitArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of password_secret + if self.password_secret: + _dict['passwordSecret'] = self.password_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of ssh_private_key_secret + if self.ssh_private_key_secret: + _dict['sshPrivateKeySecret'] = self.ssh_private_key_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of username_secret + if self.username_secret: + _dict['usernameSecret'] = self.username_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1GitArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "branch": obj.get("branch"), + "depth": obj.get("depth"), + "disableSubmodules": obj.get("disableSubmodules"), + "fetch": obj.get("fetch"), + "insecureIgnoreHostKey": obj.get("insecureIgnoreHostKey"), + "passwordSecret": SecretKeySelector.from_dict(obj["passwordSecret"]) if obj.get("passwordSecret") is not None else None, + "repo": obj.get("repo"), + "revision": obj.get("revision"), + "singleBranch": obj.get("singleBranch"), + "sshPrivateKeySecret": SecretKeySelector.from_dict(obj["sshPrivateKeySecret"]) if obj.get("sshPrivateKeySecret") is not None else None, + "usernameSecret": SecretKeySelector.from_dict(obj["usernameSecret"]) if obj.get("usernameSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact.py new file mode 100644 index 000000000000..7531bd5db8f6 --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact.py @@ -0,0 +1,116 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HDFSArtifact(BaseModel): + """ + HDFSArtifact is the location of an HDFS artifact + """ # noqa: E501 + addresses: Optional[List[StrictStr]] = Field(default=None, description="Addresses is accessible addresses of HDFS name nodes") + force: Optional[StrictBool] = Field(default=None, description="Force copies a file forcibly even if it exists") + hdfs_user: Optional[StrictStr] = Field(default=None, description="HDFSUser is the user to access HDFS file system. 
It is ignored if either ccache or keytab is used.", alias="hdfsUser") + krb_c_cache_secret: Optional[SecretKeySelector] = Field(default=None, alias="krbCCacheSecret") + krb_config_config_map: Optional[ConfigMapKeySelector] = Field(default=None, alias="krbConfigConfigMap") + krb_keytab_secret: Optional[SecretKeySelector] = Field(default=None, alias="krbKeytabSecret") + krb_realm: Optional[StrictStr] = Field(default=None, description="KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.", alias="krbRealm") + krb_service_principal_name: Optional[StrictStr] = Field(default=None, description="KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.", alias="krbServicePrincipalName") + krb_username: Optional[StrictStr] = Field(default=None, description="KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.", alias="krbUsername") + path: StrictStr = Field(description="Path is a file path in HDFS") + __properties: ClassVar[List[str]] = ["addresses", "force", "hdfsUser", "krbCCacheSecret", "krbConfigConfigMap", "krbKeytabSecret", "krbRealm", "krbServicePrincipalName", "krbUsername", "path"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary 
representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of krb_c_cache_secret + if self.krb_c_cache_secret: + _dict['krbCCacheSecret'] = self.krb_c_cache_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of krb_config_config_map + if self.krb_config_config_map: + _dict['krbConfigConfigMap'] = self.krb_config_config_map.to_dict() + # override the default output from pydantic by calling `to_dict()` of krb_keytab_secret + if self.krb_keytab_secret: + _dict['krbKeytabSecret'] = self.krb_keytab_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "addresses": obj.get("addresses"), + "force": obj.get("force"), + "hdfsUser": obj.get("hdfsUser"), + "krbCCacheSecret": SecretKeySelector.from_dict(obj["krbCCacheSecret"]) if obj.get("krbCCacheSecret") is not None else None, + "krbConfigConfigMap": ConfigMapKeySelector.from_dict(obj["krbConfigConfigMap"]) if obj.get("krbConfigConfigMap") is not None else None, + "krbKeytabSecret": SecretKeySelector.from_dict(obj["krbKeytabSecret"]) if obj.get("krbKeytabSecret") is not None else None, + "krbRealm": obj.get("krbRealm"), + "krbServicePrincipalName": obj.get("krbServicePrincipalName"), + "krbUsername": obj.get("krbUsername"), + "path": obj.get("path") + }) + return _obj + + diff 
--git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py new file mode 100644 index 000000000000..554a02f740b0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.py @@ -0,0 +1,116 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HDFSArtifactRepository(BaseModel): + """ + HDFSArtifactRepository defines the controller configuration for an HDFS artifact repository + """ # noqa: E501 + addresses: Optional[List[StrictStr]] = Field(default=None, description="Addresses is accessible addresses of HDFS name nodes") + force: Optional[StrictBool] = Field(default=None, description="Force copies a file forcibly even if it exists") + hdfs_user: Optional[StrictStr] = Field(default=None, description="HDFSUser is the user to access HDFS file system. 
It is ignored if either ccache or keytab is used.", alias="hdfsUser") + krb_c_cache_secret: Optional[SecretKeySelector] = Field(default=None, alias="krbCCacheSecret") + krb_config_config_map: Optional[ConfigMapKeySelector] = Field(default=None, alias="krbConfigConfigMap") + krb_keytab_secret: Optional[SecretKeySelector] = Field(default=None, alias="krbKeytabSecret") + krb_realm: Optional[StrictStr] = Field(default=None, description="KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used.", alias="krbRealm") + krb_service_principal_name: Optional[StrictStr] = Field(default=None, description="KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used.", alias="krbServicePrincipalName") + krb_username: Optional[StrictStr] = Field(default=None, description="KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.", alias="krbUsername") + path_format: Optional[StrictStr] = Field(default=None, description="PathFormat is defines the format of path to store a file. 
Can reference workflow variables", alias="pathFormat") + __properties: ClassVar[List[str]] = ["addresses", "force", "hdfsUser", "krbCCacheSecret", "krbConfigConfigMap", "krbKeytabSecret", "krbRealm", "krbServicePrincipalName", "krbUsername", "pathFormat"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifactRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of krb_c_cache_secret + if self.krb_c_cache_secret: + _dict['krbCCacheSecret'] = self.krb_c_cache_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of krb_config_config_map + if self.krb_config_config_map: + _dict['krbConfigConfigMap'] = self.krb_config_config_map.to_dict() + # override the default output from pydantic by calling `to_dict()` of krb_keytab_secret + if self.krb_keytab_secret: + _dict['krbKeytabSecret'] = self.krb_keytab_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifactRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "addresses": obj.get("addresses"), + "force": obj.get("force"), + "hdfsUser": obj.get("hdfsUser"), + "krbCCacheSecret": SecretKeySelector.from_dict(obj["krbCCacheSecret"]) if obj.get("krbCCacheSecret") is not None else None, + "krbConfigConfigMap": ConfigMapKeySelector.from_dict(obj["krbConfigConfigMap"]) if obj.get("krbConfigConfigMap") is not None else None, + "krbKeytabSecret": SecretKeySelector.from_dict(obj["krbKeytabSecret"]) if obj.get("krbKeytabSecret") is not None else None, + "krbRealm": obj.get("krbRealm"), + "krbServicePrincipalName": obj.get("krbServicePrincipalName"), + "krbUsername": obj.get("krbUsername"), + "pathFormat": obj.get("pathFormat") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_header.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_header.py new file mode 100644 index 000000000000..b69a34f4bef5 --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_header.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Header(BaseModel): + """ + Header indicate a key-value request header to be used when fetching artifacts over HTTP + """ # noqa: E501 + name: StrictStr = Field(description="Name is the header name") + value: StrictStr = Field(description="Value is the literal value to use for the header") + __properties: ClassVar[List[str]] = ["name", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Header from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Header from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_histogram.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_histogram.py new file mode 100644 index 000000000000..54e532262bfd --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_histogram.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Union +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Histogram(BaseModel): + """ + Histogram is a Histogram prometheus metric + """ # noqa: E501 + buckets: List[Union[StrictFloat, StrictInt]] = Field(description="Buckets is a list of bucket divisors for the histogram") + value: StrictStr = Field(description="Value is the value of the metric") + __properties: ClassVar[List[str]] = ["buckets", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Histogram from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Histogram from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "buckets": obj.get("buckets"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http.py new file mode 100644 index 000000000000..62c247f2ee0b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header import IoArgoprojWorkflowV1alpha1HTTPHeader +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HTTP(BaseModel): + """ + IoArgoprojWorkflowV1alpha1HTTP + """ # noqa: E501 + body: Optional[StrictStr] = Field(default=None, description="Body is content of the HTTP Request") + body_from: Optional[IoArgoprojWorkflowV1alpha1HTTPBodySource] = Field(default=None, alias="bodyFrom") + headers: Optional[List[IoArgoprojWorkflowV1alpha1HTTPHeader]] = Field(default=None, description="Headers are an optional list of headers to send with HTTP requests") + insecure_skip_verify: Optional[StrictBool] = Field(default=None, description="InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client", alias="insecureSkipVerify") + method: Optional[StrictStr] = Field(default=None, description="Method is HTTP methods for HTTP Request") + success_condition: Optional[StrictStr] = Field(default=None, description="SuccessCondition is an expression if evaluated to true is considered successful", alias="successCondition") + timeout_seconds: Optional[StrictInt] = Field(default=None, description="TimeoutSeconds is request timeout for HTTP Request. 
Default is 30 seconds", alias="timeoutSeconds") + url: StrictStr = Field(description="URL of the HTTP Request") + __properties: ClassVar[List[str]] = ["body", "bodyFrom", "headers", "insecureSkipVerify", "method", "successCondition", "timeoutSeconds", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTP from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of body_from + if self.body_from: + _dict['bodyFrom'] = self.body_from.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in headers (list) + _items = [] + if self.headers: + for _item in self.headers: + if _item: + _items.append(_item.to_dict()) + _dict['headers'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTP from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "body": obj.get("body"), + "bodyFrom": IoArgoprojWorkflowV1alpha1HTTPBodySource.from_dict(obj["bodyFrom"]) if obj.get("bodyFrom") is not None else None, + "headers": [IoArgoprojWorkflowV1alpha1HTTPHeader.from_dict(_item) for _item in obj["headers"]] if obj.get("headers") is not None else None, + "insecureSkipVerify": obj.get("insecureSkipVerify"), + "method": obj.get("method"), + "successCondition": obj.get("successCondition"), + "timeoutSeconds": obj.get("timeoutSeconds"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_artifact.py new file mode 100644 index 000000000000..fe0db46ef0bd --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_artifact.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HTTPArtifact(BaseModel): + """ + HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container + """ # noqa: E501 + auth: Optional[IoArgoprojWorkflowV1alpha1HTTPAuth] = None + headers: Optional[List[IoArgoprojWorkflowV1alpha1Header]] = Field(default=None, description="Headers are an optional list of headers to send with HTTP requests for artifacts") + url: StrictStr = Field(description="URL of the artifact") + __properties: ClassVar[List[str]] = ["auth", "headers", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def 
to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of auth + if self.auth: + _dict['auth'] = self.auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in headers (list) + _items = [] + if self.headers: + for _item in self.headers: + if _item: + _items.append(_item.to_dict()) + _dict['headers'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "auth": IoArgoprojWorkflowV1alpha1HTTPAuth.from_dict(obj["auth"]) if obj.get("auth") is not None else None, + "headers": [IoArgoprojWorkflowV1alpha1Header.from_dict(_item) for _item in obj["headers"]] if obj.get("headers") is not None else None, + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_auth.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_auth.py new file mode 100644 index 000000000000..a9cc0f73bb0e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_auth.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HTTPAuth(BaseModel): + """ + IoArgoprojWorkflowV1alpha1HTTPAuth + """ # noqa: E501 + basic_auth: Optional[IoArgoprojWorkflowV1alpha1BasicAuth] = Field(default=None, alias="basicAuth") + client_cert: Optional[IoArgoprojWorkflowV1alpha1ClientCertAuth] = Field(default=None, alias="clientCert") + oauth2: Optional[IoArgoprojWorkflowV1alpha1OAuth2Auth] = None + __properties: ClassVar[List[str]] = ["basicAuth", "clientCert", "oauth2"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPAuth from a JSON string""" + return 
cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of basic_auth + if self.basic_auth: + _dict['basicAuth'] = self.basic_auth.to_dict() + # override the default output from pydantic by calling `to_dict()` of client_cert + if self.client_cert: + _dict['clientCert'] = self.client_cert.to_dict() + # override the default output from pydantic by calling `to_dict()` of oauth2 + if self.oauth2: + _dict['oauth2'] = self.oauth2.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "basicAuth": IoArgoprojWorkflowV1alpha1BasicAuth.from_dict(obj["basicAuth"]) if obj.get("basicAuth") is not None else None, + "clientCert": IoArgoprojWorkflowV1alpha1ClientCertAuth.from_dict(obj["clientCert"]) if obj.get("clientCert") is not None else None, + "oauth2": IoArgoprojWorkflowV1alpha1OAuth2Auth.from_dict(obj["oauth2"]) if obj.get("oauth2") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_body_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_body_source.py new file mode 100644 index 000000000000..abfb43478a5e --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_body_source.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HTTPBodySource(BaseModel): + """ + HTTPBodySource contains the source of the HTTP body. + """ # noqa: E501 + bytes: Optional[Union[Annotated[bytes, Field(strict=True)], Annotated[str, Field(strict=True)]]] = None + __properties: ClassVar[List[str]] = ["bytes"] + + @field_validator('bytes') + def bytes_validate_regular_expression(cls, value): + """Validates the regular expression""" + if value is None: + return value + + if not re.match(r"^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$", value): + raise ValueError(r"must validate the regular expression /^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, 
exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPBodySource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPBodySource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "bytes": obj.get("bytes") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header.py new file mode 100644 index 000000000000..23a7359c0b77 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header_source import IoArgoprojWorkflowV1alpha1HTTPHeaderSource +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HTTPHeader(BaseModel): + """ + IoArgoprojWorkflowV1alpha1HTTPHeader + """ # noqa: E501 + name: StrictStr + value: Optional[StrictStr] = None + value_from: Optional[IoArgoprojWorkflowV1alpha1HTTPHeaderSource] = Field(default=None, alias="valueFrom") + __properties: ClassVar[List[str]] = ["name", "value", "valueFrom"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPHeader from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of value_from + if self.value_from: + _dict['valueFrom'] = self.value_from.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPHeader from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "value": obj.get("value"), + "valueFrom": IoArgoprojWorkflowV1alpha1HTTPHeaderSource.from_dict(obj["valueFrom"]) if obj.get("valueFrom") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header_source.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header_source.py new file mode 100644 index 000000000000..b88a229955af --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_http_header_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1HTTPHeaderSource(BaseModel): + """ + IoArgoprojWorkflowV1alpha1HTTPHeaderSource + """ # noqa: E501 + secret_key_ref: Optional[SecretKeySelector] = Field(default=None, alias="secretKeyRef") + __properties: ClassVar[List[str]] = ["secretKeyRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPHeaderSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_key_ref + if self.secret_key_ref: + _dict['secretKeyRef'] = self.secret_key_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1HTTPHeaderSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "secretKeyRef": SecretKeySelector.from_dict(obj["secretKeyRef"]) if obj.get("secretKeyRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_info_response.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_info_response.py new file mode 100644 index 000000000000..c4cfd5c4ae2e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_info_response.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_column import IoArgoprojWorkflowV1alpha1Column +from argo_workflows.models.io_argoproj_workflow_v1alpha1_link import IoArgoprojWorkflowV1alpha1Link +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1InfoResponse(BaseModel): + """ + IoArgoprojWorkflowV1alpha1InfoResponse + """ # noqa: E501 + columns: Optional[List[IoArgoprojWorkflowV1alpha1Column]] = None + links: Optional[List[IoArgoprojWorkflowV1alpha1Link]] = None + managed_namespace: Optional[StrictStr] = Field(default=None, alias="managedNamespace") + modals: Optional[Dict[str, StrictBool]] = None + nav_color: Optional[StrictStr] = Field(default=None, alias="navColor") + __properties: ClassVar[List[str]] = ["columns", "links", "managedNamespace", "modals", "navColor"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1InfoResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in columns (list) + _items = [] + if self.columns: + for _item in self.columns: + if _item: + _items.append(_item.to_dict()) + _dict['columns'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in links (list) + _items = [] + if self.links: + for _item in self.links: + if _item: + _items.append(_item.to_dict()) + _dict['links'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1InfoResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "columns": [IoArgoprojWorkflowV1alpha1Column.from_dict(_item) for _item in obj["columns"]] if obj.get("columns") is not None else None, + "links": [IoArgoprojWorkflowV1alpha1Link.from_dict(_item) for _item in obj["links"]] if obj.get("links") is not None else None, + "managedNamespace": obj.get("managedNamespace"), + "modals": obj.get("modals"), + "navColor": obj.get("navColor") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_inputs.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_inputs.py new file mode 100644 index 000000000000..87ab41f4f4d3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_inputs.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is 
an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Inputs(BaseModel): + """ + Inputs are the mechanism for passing parameters, artifacts, volumes from one template to another + """ # noqa: E501 + artifacts: Optional[List[IoArgoprojWorkflowV1alpha1Artifact]] = Field(default=None, description="Artifact are a list of artifacts passed as inputs") + parameters: Optional[List[IoArgoprojWorkflowV1alpha1Parameter]] = Field(default=None, description="Parameters are a list of parameters passed as inputs") + __properties: ClassVar[List[str]] = ["artifacts", "parameters"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of 
IoArgoprojWorkflowV1alpha1Inputs from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in artifacts (list) + _items = [] + if self.artifacts: + for _item in self.artifacts: + if _item: + _items.append(_item.to_dict()) + _dict['artifacts'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Inputs from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifacts": [IoArgoprojWorkflowV1alpha1Artifact.from_dict(_item) for _item in obj["artifacts"]] if obj.get("artifacts") is not None else None, + "parameters": [IoArgoprojWorkflowV1alpha1Parameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_keys.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_keys.py new file mode 100644 index 000000000000..a80cc05362c9 --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_keys.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1LabelKeys(BaseModel): + """ + LabelKeys is list of keys + """ # noqa: E501 + items: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["items"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LabelKeys from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LabelKeys from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "items": obj.get("items") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_value_from.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_value_from.py new file mode 100644 index 000000000000..27948a3e64a7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_value_from.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1LabelValueFrom(BaseModel): + """ + IoArgoprojWorkflowV1alpha1LabelValueFrom + """ # noqa: E501 + expression: StrictStr + __properties: ClassVar[List[str]] = ["expression"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LabelValueFrom from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LabelValueFrom from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "expression": obj.get("expression") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_values.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_values.py new file mode 100644 index 000000000000..cd3364a5cc5f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_label_values.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1LabelValues(BaseModel): + """ + Labels is list of workflow labels + """ # noqa: E501 + items: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["items"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LabelValues from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LabelValues from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "items": obj.get("items") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lifecycle_hook.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lifecycle_hook.py new file mode 100644 index 000000000000..597f6686cc96 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lifecycle_hook.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1LifecycleHook(BaseModel): + """ + IoArgoprojWorkflowV1alpha1LifecycleHook + """ # noqa: E501 + arguments: Optional[IoArgoprojWorkflowV1alpha1Arguments] = None + expression: Optional[StrictStr] = Field(default=None, description="Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored") + template: Optional[StrictStr] = Field(default=None, description="Template is the name of the template to execute by the hook") + template_ref: Optional[IoArgoprojWorkflowV1alpha1TemplateRef] = Field(default=None, alias="templateRef") + __properties: ClassVar[List[str]] = ["arguments", "expression", "template", "templateRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LifecycleHook from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) 
-> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of arguments + if self.arguments: + _dict['arguments'] = self.arguments.to_dict() + # override the default output from pydantic by calling `to_dict()` of template_ref + if self.template_ref: + _dict['templateRef'] = self.template_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LifecycleHook from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "arguments": IoArgoprojWorkflowV1alpha1Arguments.from_dict(obj["arguments"]) if obj.get("arguments") is not None else None, + "expression": obj.get("expression"), + "template": obj.get("template"), + "templateRef": IoArgoprojWorkflowV1alpha1TemplateRef.from_dict(obj["templateRef"]) if obj.get("templateRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_link.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_link.py new file mode 100644 index 000000000000..174f3d24c116 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_link.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Link(BaseModel): + """ + A link to another app. + """ # noqa: E501 + name: StrictStr = Field(description="The name of the link, E.g. \"Workflow Logs\" or \"Pod Logs\"") + scope: StrictStr = Field(description="\"workflow\", \"pod\", \"pod-logs\", \"event-source-logs\", \"sensor-logs\", \"workflow-list\" or \"chat\"") + url: StrictStr = Field(description="The URL. Can contain \"${metadata.namespace}\", \"${metadata.name}\", \"${status.startedAt}\", \"${status.finishedAt}\" or any other element in workflow yaml, e.g. 
\"${io.argoproj.workflow.v1alpha1.metadata.annotations.userDefinedKey}\"") + __properties: ClassVar[List[str]] = ["name", "scope", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Link from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Link from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "scope": obj.get("scope"), + "url": obj.get("url") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py new file mode 100644 index 000000000000..f219a411cf34 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest + """ # noqa: E501 + cron_workflow: Optional[IoArgoprojWorkflowV1alpha1CronWorkflow] = Field(default=None, alias="cronWorkflow") + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["cronWorkflow", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of cron_workflow + if self.cron_workflow: + _dict['cronWorkflow'] = self.cron_workflow.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cronWorkflow": IoArgoprojWorkflowV1alpha1CronWorkflow.from_dict(obj["cronWorkflow"]) if obj.get("cronWorkflow") is not None else None, + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_log_entry.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_log_entry.py new file mode 100644 index 000000000000..4bf8c25f6247 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_log_entry.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1LogEntry(BaseModel): + """ + IoArgoprojWorkflowV1alpha1LogEntry + """ # noqa: E501 + content: Optional[StrictStr] = None + pod_name: Optional[StrictStr] = Field(default=None, alias="podName") + __properties: ClassVar[List[str]] = ["content", "podName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LogEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1LogEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "content": obj.get("content"), + "podName": obj.get("podName") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_manifest_from.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_manifest_from.py new file mode 100644 index 000000000000..46e748bcb233 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_manifest_from.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ManifestFrom(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ManifestFrom + """ # noqa: E501 + artifact: IoArgoprojWorkflowV1alpha1Artifact + __properties: ClassVar[List[str]] = ["artifact"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ManifestFrom from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of artifact + if self.artifact: + _dict['artifact'] = self.artifact.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ManifestFrom from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifact": IoArgoprojWorkflowV1alpha1Artifact.from_dict(obj["artifact"]) if obj.get("artifact") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoization_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoization_status.py new file mode 100644 index 000000000000..1527be2df355 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoization_status.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1MemoizationStatus(BaseModel): + """ + MemoizationStatus is the status of this memoized node + """ # noqa: E501 + cache_name: StrictStr = Field(description="Cache is the name of the cache that was used", alias="cacheName") + hit: StrictBool = Field(description="Hit indicates whether this node was created from a cache entry") + key: StrictStr = Field(description="Key is the name of the key used for this node's cache") + __properties: ClassVar[List[str]] = ["cacheName", "hit", "key"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1MemoizationStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1MemoizationStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cacheName": obj.get("cacheName"), + "hit": obj.get("hit"), + "key": obj.get("key") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoize.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoize.py new file mode 100644 index 000000000000..33cef03115e1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_memoize.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cache import IoArgoprojWorkflowV1alpha1Cache +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Memoize(BaseModel): + """ + Memoization enables caching for the Outputs of the template + """ # noqa: E501 + cache: IoArgoprojWorkflowV1alpha1Cache + key: StrictStr = Field(description="Key is the key to use as the caching key") + max_age: StrictStr = Field(description="MaxAge is the maximum age (e.g. \"180s\", \"24h\") of an entry that is still considered valid. If an entry is older than the MaxAge, it will be ignored.", alias="maxAge") + __properties: ClassVar[List[str]] = ["cache", "key", "maxAge"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Memoize from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of cache + if self.cache: + _dict['cache'] = self.cache.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Memoize from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cache": IoArgoprojWorkflowV1alpha1Cache.from_dict(obj["cache"]) if obj.get("cache") is not None else None, + "key": obj.get("key"), + "maxAge": obj.get("maxAge") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metadata.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metadata.py new file mode 100644 index 000000000000..c472a6e045ad --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metadata.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Metadata(BaseModel): + """ + Pod metdata + """ # noqa: E501 + annotations: Optional[Dict[str, StrictStr]] = None + labels: Optional[Dict[str, StrictStr]] = None + __properties: ClassVar[List[str]] = ["annotations", "labels"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Metadata from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Metadata from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "annotations": obj.get("annotations"), + "labels": obj.get("labels") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metric_label.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metric_label.py new file mode 100644 index 000000000000..cc09fa740bef --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metric_label.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1MetricLabel(BaseModel): + """ + MetricLabel is a single label for a prometheus metric + """ # noqa: E501 + key: StrictStr + value: StrictStr + __properties: ClassVar[List[str]] = ["key", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1MetricLabel from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1MetricLabel from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metrics.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metrics.py new file mode 100644 index 000000000000..bb6a90ccabe0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_metrics.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
class IoArgoprojWorkflowV1alpha1Metrics(BaseModel):
    """
    Metrics are a list of metrics emitted from a Workflow/Template
    """ # noqa: E501
    prometheus: List[IoArgoprojWorkflowV1alpha1Prometheus] = Field(description="Prometheus is a list of prometheus metrics to be emitted")
    __properties: ClassVar[List[str]] = ["prometheus"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-print the alias-keyed dump of this model."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using alias keys."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1Metrics from a JSON string"""
        payload = json.loads(json_str)
        return cls.from_dict(payload)

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain `self.model_dump(by_alias=True)`, `None` is only
        emitted for nullable fields that were set at model initialization;
        all other `None`-valued fields are dropped.
        """
        excluded: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )
        # Each nested prometheus metric serializes itself via its own to_dict().
        if self.prometheus:
            dumped['prometheus'] = [entry.to_dict() for entry in self.prometheus if entry]
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1Metrics from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated as-is.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        raw_prometheus = obj.get("prometheus")
        return cls.model_validate({
            "prometheus": None if raw_prometheus is None else [IoArgoprojWorkflowV1alpha1Prometheus.from_dict(entry) for entry in raw_prometheus],
        })
class IoArgoprojWorkflowV1alpha1Mutex(BaseModel):
    """
    Mutex holds Mutex configuration
    """ # noqa: E501
    name: Optional[StrictStr] = Field(default=None, description="name of the mutex")
    namespace: Optional[StrictStr] = Field(default=None, description="Namespace is the namespace of the mutex, default: [namespace of workflow]")
    __properties: ClassVar[List[str]] = ["name", "namespace"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-print the alias-keyed dump of this model."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using alias keys."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1Mutex from a JSON string"""
        payload = json.loads(json_str)
        return cls.from_dict(payload)

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain `self.model_dump(by_alias=True)`, `None` is only
        emitted for nullable fields that were set at model initialization;
        all other `None`-valued fields are dropped.
        """
        excluded: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1Mutex from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated as-is.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        return cls.model_validate({
            "name": obj.get("name"),
            "namespace": obj.get("namespace"),
        })
class IoArgoprojWorkflowV1alpha1MutexHolding(BaseModel):
    """
    MutexHolding describes the mutex and the object which is holding it.
    """ # noqa: E501
    holder: Optional[StrictStr] = Field(default=None, description="Holder is a reference to the object which holds the Mutex. Holding Scenario: 1. Current workflow's NodeID which is holding the lock. e.g: ${NodeID} Waiting Scenario: 1. Current workflow or other workflow NodeID which is holding the lock. e.g: ${WorkflowName}/${NodeID}")
    mutex: Optional[StrictStr] = Field(default=None, description="Reference for the mutex e.g: ${namespace}/mutex/${mutexName}")
    __properties: ClassVar[List[str]] = ["holder", "mutex"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1MutexHolding from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # Generated template hook: no fields are excluded for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1MutexHolding from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated directly.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        # Missing keys become None, which pydantic accepts for these optional fields.
        _obj = cls.model_validate({
            "holder": obj.get("holder"),
            "mutex": obj.get("mutex")
        })
        return _obj
class IoArgoprojWorkflowV1alpha1MutexStatus(BaseModel):
    """
    MutexStatus contains which objects hold mutex locks, and which objects this workflow is waiting on to release locks.
    """ # noqa: E501
    holding: Optional[List[IoArgoprojWorkflowV1alpha1MutexHolding]] = Field(default=None, description="Holding is a list of mutexes and their respective objects that are held by mutex lock for this io.argoproj.workflow.v1alpha1.")
    waiting: Optional[List[IoArgoprojWorkflowV1alpha1MutexHolding]] = Field(default=None, description="Waiting is a list of mutexes and their respective objects this workflow is waiting for.")
    __properties: ClassVar[List[str]] = ["holding", "waiting"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-print the alias-keyed dump of this model."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using alias keys."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1MutexStatus from a JSON string"""
        payload = json.loads(json_str)
        return cls.from_dict(payload)

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain `self.model_dump(by_alias=True)`, `None` is only
        emitted for nullable fields that were set at model initialization;
        all other `None`-valued fields are dropped.
        """
        excluded: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )
        # Each held/awaited mutex serializes itself via its own to_dict().
        if self.holding:
            dumped['holding'] = [entry.to_dict() for entry in self.holding if entry]
        if self.waiting:
            dumped['waiting'] = [entry.to_dict() for entry in self.waiting if entry]
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1MutexStatus from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated as-is.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        raw_holding = obj.get("holding")
        raw_waiting = obj.get("waiting")
        return cls.model_validate({
            "holding": None if raw_holding is None else [IoArgoprojWorkflowV1alpha1MutexHolding.from_dict(entry) for entry in raw_holding],
            "waiting": None if raw_waiting is None else [IoArgoprojWorkflowV1alpha1MutexHolding.from_dict(entry) for entry in raw_waiting],
        })
class IoArgoprojWorkflowV1alpha1NodeFlag(BaseModel):
    """
    IoArgoprojWorkflowV1alpha1NodeFlag
    """ # noqa: E501
    hooked: Optional[StrictBool] = Field(default=None, description="Hooked tracks whether or not this node was triggered by hook or onExit")
    retried: Optional[StrictBool] = Field(default=None, description="Retried tracks whether or not this node was retried by retryStrategy")
    __properties: ClassVar[List[str]] = ["hooked", "retried"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-print the alias-keyed dump of this model."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using alias keys."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeFlag from a JSON string"""
        payload = json.loads(json_str)
        return cls.from_dict(payload)

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain `self.model_dump(by_alias=True)`, `None` is only
        emitted for nullable fields that were set at model initialization;
        all other `None`-valued fields are dropped.
        """
        excluded: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeFlag from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated as-is.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        return cls.model_validate({
            "hooked": obj.get("hooked"),
            "retried": obj.get("retried"),
        })
class IoArgoprojWorkflowV1alpha1NodeResult(BaseModel):
    """
    IoArgoprojWorkflowV1alpha1NodeResult
    """ # noqa: E501
    message: Optional[StrictStr] = None
    outputs: Optional[IoArgoprojWorkflowV1alpha1Outputs] = None
    phase: Optional[StrictStr] = None
    progress: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["message", "outputs", "phase", "progress"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-print the alias-keyed dump of this model."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using alias keys."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeResult from a JSON string"""
        payload = json.loads(json_str)
        return cls.from_dict(payload)

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain `self.model_dump(by_alias=True)`, `None` is only
        emitted for nullable fields that were set at model initialization;
        all other `None`-valued fields are dropped.
        """
        excluded: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )
        # The nested outputs model serializes itself via its own to_dict().
        if self.outputs:
            dumped['outputs'] = self.outputs.to_dict()
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeResult from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated as-is.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        raw_outputs = obj.get("outputs")
        return cls.model_validate({
            "message": obj.get("message"),
            "outputs": IoArgoprojWorkflowV1alpha1Outputs.from_dict(raw_outputs) if raw_outputs is not None else None,
            "phase": obj.get("phase"),
            "progress": obj.get("progress"),
        })
class IoArgoprojWorkflowV1alpha1NodeStatus(BaseModel):
    """
    NodeStatus contains status information about an individual node in the workflow
    """ # noqa: E501
    boundary_id: Optional[StrictStr] = Field(default=None, description="BoundaryID indicates the node ID of the associated template root node in which this node belongs to", alias="boundaryID")
    children: Optional[List[StrictStr]] = Field(default=None, description="Children is a list of child node IDs")
    daemoned: Optional[StrictBool] = Field(default=None, description="Daemoned tracks whether or not this node was daemoned and need to be terminated")
    display_name: Optional[StrictStr] = Field(default=None, description="DisplayName is a human readable representation of the node. Unique within a template boundary", alias="displayName")
    estimated_duration: Optional[StrictInt] = Field(default=None, description="EstimatedDuration in seconds.", alias="estimatedDuration")
    finished_at: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="finishedAt")
    host_node_name: Optional[StrictStr] = Field(default=None, description="HostNodeName name of the Kubernetes node on which the Pod is running, if applicable", alias="hostNodeName")
    id: StrictStr = Field(description="ID is a unique identifier of a node within the worklow It is implemented as a hash of the node name, which makes the ID deterministic")
    inputs: Optional[IoArgoprojWorkflowV1alpha1Inputs] = None
    memoization_status: Optional[IoArgoprojWorkflowV1alpha1MemoizationStatus] = Field(default=None, alias="memoizationStatus")
    message: Optional[StrictStr] = Field(default=None, description="A human readable message indicating details about why the node is in this condition.")
    name: StrictStr = Field(description="Name is unique name in the node tree used to generate the node ID")
    node_flag: Optional[IoArgoprojWorkflowV1alpha1NodeFlag] = Field(default=None, alias="nodeFlag")
    outbound_nodes: Optional[List[StrictStr]] = Field(default=None, description="OutboundNodes tracks the node IDs which are considered \"outbound\" nodes to a template invocation. For every invocation of a template, there are nodes which we considered as \"outbound\". Essentially, these are last nodes in the execution sequence to run, before the template is considered completed. These nodes are then connected as parents to a following step. In the case of single pod steps (i.e. container, script, resource templates), this list will be nil since the pod itself is already considered the \"outbound\" node. In the case of DAGs, outbound nodes are the \"target\" tasks (tasks with no children). In the case of steps, outbound nodes are all the containers involved in the last step group. NOTE: since templates are composable, the list of outbound nodes are carried upwards when a DAG/steps template invokes another DAG/steps template. In other words, the outbound nodes of a template, will be a superset of the outbound nodes of its last children.", alias="outboundNodes")
    outputs: Optional[IoArgoprojWorkflowV1alpha1Outputs] = None
    phase: Optional[StrictStr] = Field(default=None, description="Phase a simple, high-level summary of where the node is in its lifecycle. Can be used as a state machine. Will be one of these values \"Pending\", \"Running\" before the node is completed, or \"Succeeded\", \"Skipped\", \"Failed\", \"Error\", or \"Omitted\" as a final state.")
    pod_ip: Optional[StrictStr] = Field(default=None, description="PodIP captures the IP of the pod for daemoned steps", alias="podIP")
    progress: Optional[StrictStr] = Field(default=None, description="Progress to completion")
    resources_duration: Optional[Dict[str, StrictInt]] = Field(default=None, description="ResourcesDuration is indicative, but not accurate, resource duration. This is populated when the nodes completes.", alias="resourcesDuration")
    started_at: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="startedAt")
    synchronization_status: Optional[IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus] = Field(default=None, alias="synchronizationStatus")
    template_name: Optional[StrictStr] = Field(default=None, description="TemplateName is the template name which this node corresponds to. Not applicable to virtual nodes (e.g. Retry, StepGroup)", alias="templateName")
    template_ref: Optional[IoArgoprojWorkflowV1alpha1TemplateRef] = Field(default=None, alias="templateRef")
    template_scope: Optional[StrictStr] = Field(default=None, description="TemplateScope is the template scope in which the template of this node was retrieved.", alias="templateScope")
    type: StrictStr = Field(description="Type indicates type of node")
    # NOTE(review): generated list of wire-format (alias) property names; must stay in sync with the fields above.
    __properties: ClassVar[List[str]] = ["boundaryID", "children", "daemoned", "displayName", "estimatedDuration", "finishedAt", "hostNodeName", "id", "inputs", "memoizationStatus", "message", "name", "nodeFlag", "outboundNodes", "outputs", "phase", "podIP", "progress", "resourcesDuration", "startedAt", "synchronizationStatus", "templateName", "templateRef", "templateScope", "type"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeStatus from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # Generated template hook: no fields are excluded for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Nested models serialize themselves so their own alias/None rules apply.
        # override the default output from pydantic by calling `to_dict()` of inputs
        if self.inputs:
            _dict['inputs'] = self.inputs.to_dict()
        # override the default output from pydantic by calling `to_dict()` of memoization_status
        if self.memoization_status:
            _dict['memoizationStatus'] = self.memoization_status.to_dict()
        # override the default output from pydantic by calling `to_dict()` of node_flag
        if self.node_flag:
            _dict['nodeFlag'] = self.node_flag.to_dict()
        # override the default output from pydantic by calling `to_dict()` of outputs
        if self.outputs:
            _dict['outputs'] = self.outputs.to_dict()
        # override the default output from pydantic by calling `to_dict()` of synchronization_status
        if self.synchronization_status:
            _dict['synchronizationStatus'] = self.synchronization_status.to_dict()
        # override the default output from pydantic by calling `to_dict()` of template_ref
        if self.template_ref:
            _dict['templateRef'] = self.template_ref.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeStatus from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated directly.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        # Keys are wire-format aliases; nested objects are parsed via their models' from_dict.
        _obj = cls.model_validate({
            "boundaryID": obj.get("boundaryID"),
            "children": obj.get("children"),
            "daemoned": obj.get("daemoned"),
            "displayName": obj.get("displayName"),
            "estimatedDuration": obj.get("estimatedDuration"),
            "finishedAt": obj.get("finishedAt"),
            "hostNodeName": obj.get("hostNodeName"),
            "id": obj.get("id"),
            "inputs": IoArgoprojWorkflowV1alpha1Inputs.from_dict(obj["inputs"]) if obj.get("inputs") is not None else None,
            "memoizationStatus": IoArgoprojWorkflowV1alpha1MemoizationStatus.from_dict(obj["memoizationStatus"]) if obj.get("memoizationStatus") is not None else None,
            "message": obj.get("message"),
            "name": obj.get("name"),
            "nodeFlag": IoArgoprojWorkflowV1alpha1NodeFlag.from_dict(obj["nodeFlag"]) if obj.get("nodeFlag") is not None else None,
            "outboundNodes": obj.get("outboundNodes"),
            "outputs": IoArgoprojWorkflowV1alpha1Outputs.from_dict(obj["outputs"]) if obj.get("outputs") is not None else None,
            "phase": obj.get("phase"),
            "podIP": obj.get("podIP"),
            "progress": obj.get("progress"),
            "resourcesDuration": obj.get("resourcesDuration"),
            "startedAt": obj.get("startedAt"),
            "synchronizationStatus": IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.from_dict(obj["synchronizationStatus"]) if obj.get("synchronizationStatus") is not None else None,
            "templateName": obj.get("templateName"),
            "templateRef": IoArgoprojWorkflowV1alpha1TemplateRef.from_dict(obj["templateRef"]) if obj.get("templateRef") is not None else None,
            "templateScope": obj.get("templateScope"),
            "type": obj.get("type")
        })
        return _obj
class IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus(BaseModel):
    """
    NodeSynchronizationStatus stores the status of a node
    """ # noqa: E501
    waiting: Optional[StrictStr] = Field(default=None, description="Waiting is the name of the lock that this node is waiting for")
    __properties: ClassVar[List[str]] = ["waiting"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-print the alias-keyed dump of this model."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using alias keys."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus from a JSON string"""
        payload = json.loads(json_str)
        return cls.from_dict(payload)

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain `self.model_dump(by_alias=True)`, `None` is only
        emitted for nullable fields that were set at model initialization;
        all other `None`-valued fields are dropped.
        """
        excluded: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated as-is.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        return cls.model_validate({
            "waiting": obj.get("waiting"),
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1OAuth2Auth(BaseModel): + """ + OAuth2Auth holds all information for client authentication via OAuth2 tokens + """ # noqa: E501 + client_id_secret: Optional[SecretKeySelector] = Field(default=None, alias="clientIDSecret") + client_secret_secret: Optional[SecretKeySelector] = Field(default=None, alias="clientSecretSecret") + endpoint_params: Optional[List[IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]] = Field(default=None, alias="endpointParams") + scopes: Optional[List[StrictStr]] = None + token_url_secret: Optional[SecretKeySelector] = Field(default=None, alias="tokenURLSecret") + __properties: ClassVar[List[str]] = ["clientIDSecret", "clientSecretSecret", "endpointParams", "scopes", "tokenURLSecret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OAuth2Auth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return 
the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of client_id_secret + if self.client_id_secret: + _dict['clientIDSecret'] = self.client_id_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of client_secret_secret + if self.client_secret_secret: + _dict['clientSecretSecret'] = self.client_secret_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in endpoint_params (list) + _items = [] + if self.endpoint_params: + for _item in self.endpoint_params: + if _item: + _items.append(_item.to_dict()) + _dict['endpointParams'] = _items + # override the default output from pydantic by calling `to_dict()` of token_url_secret + if self.token_url_secret: + _dict['tokenURLSecret'] = self.token_url_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OAuth2Auth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "clientIDSecret": SecretKeySelector.from_dict(obj["clientIDSecret"]) if obj.get("clientIDSecret") is not None else None, + "clientSecretSecret": SecretKeySelector.from_dict(obj["clientSecretSecret"]) if obj.get("clientSecretSecret") is not None else None, + "endpointParams": [IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.from_dict(_item) for _item in obj["endpointParams"]] if obj.get("endpointParams") is not None 
else None, + "scopes": obj.get("scopes"), + "tokenURLSecret": SecretKeySelector.from_dict(obj["tokenURLSecret"]) if obj.get("tokenURLSecret") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py new file mode 100644 index 000000000000..daddf9b3c331 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1OAuth2EndpointParam(BaseModel): + """ + EndpointParam is for requesting optional fields that should be sent in the oauth request + """ # noqa: E501 + key: StrictStr = Field(description="Name is the header name") + value: Optional[StrictStr] = Field(default=None, description="Value is the literal value to use for the header") + __properties: ClassVar[List[str]] = ["key", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + 
"""Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OAuth2EndpointParam from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OAuth2EndpointParam from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact.py new file mode 100644 index 000000000000..9d823e9c1407 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1OSSArtifact(BaseModel): + """ + OSSArtifact is the location of an Alibaba Cloud OSS artifact + """ # noqa: E501 + access_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="accessKeySecret") + bucket: Optional[StrictStr] = Field(default=None, description="Bucket is the name of the bucket") + create_bucket_if_not_present: Optional[StrictBool] = Field(default=None, description="CreateBucketIfNotPresent tells the driver to attempt to create the OSS bucket for output artifacts, if it doesn't exist", alias="createBucketIfNotPresent") + endpoint: Optional[StrictStr] = Field(default=None, description="Endpoint is the hostname of the bucket endpoint") + key: StrictStr = Field(description="Key is the path in the bucket where the artifact resides") + lifecycle_rule: Optional[IoArgoprojWorkflowV1alpha1OSSLifecycleRule] = Field(default=None, alias="lifecycleRule") + secret_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="secretKeySecret") + security_token: Optional[StrictStr] = Field(default=None, description="SecurityToken is the user's temporary security token. 
For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm", alias="securityToken") + use_sdk_creds: Optional[StrictBool] = Field(default=None, description="UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", alias="useSDKCreds") + __properties: ClassVar[List[str]] = ["accessKeySecret", "bucket", "createBucketIfNotPresent", "endpoint", "key", "lifecycleRule", "secretKeySecret", "securityToken", "useSDKCreds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OSSArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_key_secret + if self.access_key_secret: + _dict['accessKeySecret'] = self.access_key_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of lifecycle_rule + if self.lifecycle_rule: + _dict['lifecycleRule'] = self.lifecycle_rule.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_key_secret + if self.secret_key_secret: + _dict['secretKeySecret'] = self.secret_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OSSArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessKeySecret": SecretKeySelector.from_dict(obj["accessKeySecret"]) if obj.get("accessKeySecret") is not None else None, + "bucket": obj.get("bucket"), + "createBucketIfNotPresent": obj.get("createBucketIfNotPresent"), + "endpoint": obj.get("endpoint"), + "key": obj.get("key"), + "lifecycleRule": IoArgoprojWorkflowV1alpha1OSSLifecycleRule.from_dict(obj["lifecycleRule"]) if obj.get("lifecycleRule") is not None else None, + "secretKeySecret": SecretKeySelector.from_dict(obj["secretKeySecret"]) if obj.get("secretKeySecret") is not None else None, + "securityToken": obj.get("securityToken"), + "useSDKCreds": obj.get("useSDKCreds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py new file mode 100644 index 000000000000..5c74355d2e9e --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_artifact_repository.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule +from argo_workflows.models.secret_key_selector import SecretKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1OSSArtifactRepository(BaseModel): + """ + OSSArtifactRepository defines the controller configuration for an OSS artifact repository + """ # noqa: E501 + access_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="accessKeySecret") + bucket: Optional[StrictStr] = Field(default=None, description="Bucket is the name of the bucket") + create_bucket_if_not_present: Optional[StrictBool] = Field(default=None, description="CreateBucketIfNotPresent tells the driver to attempt to create the OSS bucket for output artifacts, if it doesn't exist", alias="createBucketIfNotPresent") + endpoint: Optional[StrictStr] = Field(default=None, description="Endpoint is the hostname of the bucket endpoint") + key_format: Optional[StrictStr] = Field(default=None, description="KeyFormat defines the format of how to store keys and can reference workflow variables.", alias="keyFormat") + lifecycle_rule: 
Optional[IoArgoprojWorkflowV1alpha1OSSLifecycleRule] = Field(default=None, alias="lifecycleRule") + secret_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="secretKeySecret") + security_token: Optional[StrictStr] = Field(default=None, description="SecurityToken is the user's temporary security token. For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm", alias="securityToken") + use_sdk_creds: Optional[StrictBool] = Field(default=None, description="UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", alias="useSDKCreds") + __properties: ClassVar[List[str]] = ["accessKeySecret", "bucket", "createBucketIfNotPresent", "endpoint", "keyFormat", "lifecycleRule", "secretKeySecret", "securityToken", "useSDKCreds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OSSArtifactRepository from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of access_key_secret + if self.access_key_secret: + _dict['accessKeySecret'] = self.access_key_secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of lifecycle_rule + if self.lifecycle_rule: + _dict['lifecycleRule'] = self.lifecycle_rule.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret_key_secret + if self.secret_key_secret: + _dict['secretKeySecret'] = self.secret_key_secret.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OSSArtifactRepository from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessKeySecret": SecretKeySelector.from_dict(obj["accessKeySecret"]) if obj.get("accessKeySecret") is not None else None, + "bucket": obj.get("bucket"), + "createBucketIfNotPresent": obj.get("createBucketIfNotPresent"), + "endpoint": obj.get("endpoint"), + "keyFormat": obj.get("keyFormat"), + "lifecycleRule": IoArgoprojWorkflowV1alpha1OSSLifecycleRule.from_dict(obj["lifecycleRule"]) if obj.get("lifecycleRule") is not None else None, + "secretKeySecret": SecretKeySelector.from_dict(obj["secretKeySecret"]) if obj.get("secretKeySecret") is not None else None, + "securityToken": obj.get("securityToken"), + "useSDKCreds": obj.get("useSDKCreds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py new file mode 100644 index 000000000000..f14bcc8cf7ae --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1OSSLifecycleRule(BaseModel): + """ + OSSLifecycleRule specifies how to manage bucket's lifecycle + """ # noqa: E501 + mark_deletion_after_days: Optional[StrictInt] = Field(default=None, description="MarkDeletionAfterDays is the number of days before we delete objects in the bucket", alias="markDeletionAfterDays") + mark_infrequent_access_after_days: Optional[StrictInt] = Field(default=None, description="MarkInfrequentAccessAfterDays is the number of days before we convert the objects in the bucket to Infrequent Access (IA) storage type", alias="markInfrequentAccessAfterDays") + __properties: ClassVar[List[str]] = ["markDeletionAfterDays", "markInfrequentAccessAfterDays"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return 
json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OSSLifecycleRule from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1OSSLifecycleRule from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "markDeletionAfterDays": obj.get("markDeletionAfterDays"), + "markInfrequentAccessAfterDays": obj.get("markInfrequentAccessAfterDays") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_outputs.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_outputs.py new file mode 100644 index 000000000000..84485574a313 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_outputs.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Outputs(BaseModel): + """ + Outputs hold parameters, artifacts, and results from a step + """ # noqa: E501 + artifacts: Optional[List[IoArgoprojWorkflowV1alpha1Artifact]] = Field(default=None, description="Artifacts holds the list of output artifacts produced by a step") + exit_code: Optional[StrictStr] = Field(default=None, description="ExitCode holds the exit code of a script template", alias="exitCode") + parameters: Optional[List[IoArgoprojWorkflowV1alpha1Parameter]] = Field(default=None, description="Parameters holds the list of output parameters produced by a step") + result: Optional[StrictStr] = Field(default=None, description="Result holds the result (stdout) of a script template") + __properties: ClassVar[List[str]] = ["artifacts", "exitCode", "parameters", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Outputs from a JSON string""" + return 
cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in artifacts (list) + _items = [] + if self.artifacts: + for _item in self.artifacts: + if _item: + _items.append(_item.to_dict()) + _dict['artifacts'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in parameters (list) + _items = [] + if self.parameters: + for _item in self.parameters: + if _item: + _items.append(_item.to_dict()) + _dict['parameters'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Outputs from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifacts": [IoArgoprojWorkflowV1alpha1Artifact.from_dict(_item) for _item in obj["artifacts"]] if obj.get("artifacts") is not None else None, + "exitCode": obj.get("exitCode"), + "parameters": [IoArgoprojWorkflowV1alpha1Parameter.from_dict(_item) for _item in obj["parameters"]] if obj.get("parameters") is not None else None, + "result": obj.get("result") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parallel_steps.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parallel_steps.py new file mode 100644 index 000000000000..41e1715652c8 --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parallel_steps.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.null import null +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ParallelSteps(null): + """ + IoArgoprojWorkflowV1alpha1ParallelSteps + """ # noqa: E501 + __properties: ClassVar[List[str]] = [] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ParallelSteps from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ParallelSteps from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parameter.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parameter.py new file mode 100644 index 000000000000..634425eb54f2 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_parameter.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_value_from import IoArgoprojWorkflowV1alpha1ValueFrom +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Parameter(BaseModel): + """ + Parameter indicate a passed string parameter to a service template with an optional default value + """ # noqa: E501 + default: Optional[StrictStr] = Field(default=None, description="Default is the default value to use for an input parameter if a value was not supplied") + description: Optional[StrictStr] = Field(default=None, description="Description is the parameter description") + enum: Optional[List[StrictStr]] = Field(default=None, description="Enum holds a list of string values to choose from, for the actual value of the parameter") + global_name: Optional[StrictStr] = Field(default=None, description="GlobalName exports an output parameter to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.parameters.XXXX}} and in workflow.status.outputs.parameters", alias="globalName") + name: StrictStr = Field(description="Name is the parameter name") + value: Optional[StrictStr] = Field(default=None, description="Value is the literal value to use for the parameter. 
If specified in the context of an input parameter, the value takes precedence over any passed values") + value_from: Optional[IoArgoprojWorkflowV1alpha1ValueFrom] = Field(default=None, alias="valueFrom") + __properties: ClassVar[List[str]] = ["default", "description", "enum", "globalName", "name", "value", "valueFrom"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Parameter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of value_from + if self.value_from: + _dict['valueFrom'] = self.value_from.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Parameter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "default": obj.get("default"), + "description": obj.get("description"), + "enum": obj.get("enum"), + "globalName": obj.get("globalName"), + "name": obj.get("name"), + "value": obj.get("value"), + "valueFrom": IoArgoprojWorkflowV1alpha1ValueFrom.from_dict(obj["valueFrom"]) if obj.get("valueFrom") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_pod_gc.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_pod_gc.py new file mode 100644 index 000000000000..8526ba491206 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_pod_gc.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.duration import Duration +from argo_workflows.models.label_selector import LabelSelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1PodGC(BaseModel): + """ + PodGC describes how to delete completed pods as they complete + """ # noqa: E501 + delete_delay_duration: Optional[Duration] = Field(default=None, alias="deleteDelayDuration") + label_selector: Optional[LabelSelector] = Field(default=None, alias="labelSelector") + strategy: Optional[StrictStr] = Field(default=None, description="Strategy is the strategy to use. One of \"OnPodCompletion\", \"OnPodSuccess\", \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". If unset, does not delete Pods") + __properties: ClassVar[List[str]] = ["deleteDelayDuration", "labelSelector", "strategy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1PodGC from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of delete_delay_duration + if self.delete_delay_duration: + _dict['deleteDelayDuration'] = self.delete_delay_duration.to_dict() + # override the default output from pydantic by calling `to_dict()` of label_selector + if self.label_selector: + _dict['labelSelector'] = self.label_selector.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1PodGC from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "deleteDelayDuration": Duration.from_dict(obj["deleteDelayDuration"]) if obj.get("deleteDelayDuration") is not None else None, + "labelSelector": LabelSelector.from_dict(obj["labelSelector"]) if obj.get("labelSelector") is not None else None, + "strategy": obj.get("strategy") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_prometheus.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_prometheus.py new file mode 100644 index 000000000000..9c63076df7f7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_prometheus.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_counter import IoArgoprojWorkflowV1alpha1Counter +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gauge import IoArgoprojWorkflowV1alpha1Gauge +from argo_workflows.models.io_argoproj_workflow_v1alpha1_histogram import IoArgoprojWorkflowV1alpha1Histogram +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metric_label import IoArgoprojWorkflowV1alpha1MetricLabel +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Prometheus(BaseModel): + """ + Prometheus is a prometheus metric to be emitted + """ # noqa: E501 + counter: Optional[IoArgoprojWorkflowV1alpha1Counter] = None + gauge: Optional[IoArgoprojWorkflowV1alpha1Gauge] = None + help: StrictStr = Field(description="Help is a string that describes the metric") + histogram: Optional[IoArgoprojWorkflowV1alpha1Histogram] = None + labels: Optional[List[IoArgoprojWorkflowV1alpha1MetricLabel]] = Field(default=None, description="Labels is a list of metric labels") + name: StrictStr = Field(description="Name is the name of the metric") + when: Optional[StrictStr] = Field(default=None, description="When is a conditional statement that decides when to emit the metric") + __properties: ClassVar[List[str]] = ["counter", "gauge", "help", "histogram", "labels", "name", "when"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string 
representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Prometheus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of counter + if self.counter: + _dict['counter'] = self.counter.to_dict() + # override the default output from pydantic by calling `to_dict()` of gauge + if self.gauge: + _dict['gauge'] = self.gauge.to_dict() + # override the default output from pydantic by calling `to_dict()` of histogram + if self.histogram: + _dict['histogram'] = self.histogram.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in labels (list) + _items = [] + if self.labels: + for _item in self.labels: + if _item: + _items.append(_item.to_dict()) + _dict['labels'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Prometheus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ 
+ "counter": IoArgoprojWorkflowV1alpha1Counter.from_dict(obj["counter"]) if obj.get("counter") is not None else None, + "gauge": IoArgoprojWorkflowV1alpha1Gauge.from_dict(obj["gauge"]) if obj.get("gauge") is not None else None, + "help": obj.get("help"), + "histogram": IoArgoprojWorkflowV1alpha1Histogram.from_dict(obj["histogram"]) if obj.get("histogram") is not None else None, + "labels": [IoArgoprojWorkflowV1alpha1MetricLabel.from_dict(_item) for _item in obj["labels"]] if obj.get("labels") is not None else None, + "name": obj.get("name"), + "when": obj.get("when") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_raw_artifact.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_raw_artifact.py new file mode 100644 index 000000000000..cc16083aa9da --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_raw_artifact.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1RawArtifact(BaseModel): + """ + RawArtifact allows raw string content to be placed as an artifact in a container + """ # noqa: E501 + data: StrictStr = Field(description="Data is the string contents of the artifact") + __properties: ClassVar[List[str]] = ["data"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1RawArtifact from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1RawArtifact from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "data": obj.get("data") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resource_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resource_template.py new file mode 100644 index 000000000000..b1237267530e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resource_template.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ResourceTemplate(BaseModel): + """ + ResourceTemplate is a template subtype to manipulate kubernetes resources + """ # noqa: E501 + action: StrictStr = Field(description="Action is the action to perform to the resource. 
Must be one of: get, create, apply, delete, replace, patch") + failure_condition: Optional[StrictStr] = Field(default=None, description="FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed", alias="failureCondition") + flags: Optional[List[StrictStr]] = Field(default=None, description="Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ]") + manifest: Optional[StrictStr] = Field(default=None, description="Manifest contains the kubernetes manifest") + manifest_from: Optional[IoArgoprojWorkflowV1alpha1ManifestFrom] = Field(default=None, alias="manifestFrom") + merge_strategy: Optional[StrictStr] = Field(default=None, description="MergeStrategy is the strategy used to merge a patch. It defaults to \"strategic\" Must be one of: strategic, merge, json", alias="mergeStrategy") + set_owner_reference: Optional[StrictBool] = Field(default=None, description="SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource.", alias="setOwnerReference") + success_condition: Optional[StrictStr] = Field(default=None, description="SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step", alias="successCondition") + __properties: ClassVar[List[str]] = ["action", "failureCondition", "flags", "manifest", "manifestFrom", "mergeStrategy", "setOwnerReference", "successCondition"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # 
TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ResourceTemplate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of manifest_from + if self.manifest_from: + _dict['manifestFrom'] = self.manifest_from.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ResourceTemplate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "action": obj.get("action"), + "failureCondition": obj.get("failureCondition"), + "flags": obj.get("flags"), + "manifest": obj.get("manifest"), + "manifestFrom": IoArgoprojWorkflowV1alpha1ManifestFrom.from_dict(obj["manifestFrom"]) if obj.get("manifestFrom") is not None else None, + "mergeStrategy": obj.get("mergeStrategy"), + "setOwnerReference": obj.get("setOwnerReference"), + "successCondition": obj.get("successCondition") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py new file mode 100644 index 000000000000..3a962d732a57 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest + """ # noqa: E501 + memoized: Optional[StrictBool] = None + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + parameters: Optional[List[StrictStr]] = None + uid: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["memoized", "name", "namespace", "parameters", "uid"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: 
str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "memoized": obj.get("memoized"), + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "parameters": obj.get("parameters"), + "uid": obj.get("uid") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_affinity.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_affinity.py new file mode 100644 index 000000000000..377d9652ab16 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_affinity.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1RetryAffinity(BaseModel): + """ + RetryAffinity prevents running steps on the same host. + """ # noqa: E501 + node_anti_affinity: Optional[Dict[str, Any]] = Field(default=None, description="RetryNodeAntiAffinity is a placeholder for future expansion, only empty nodeAntiAffinity is allowed. In order to prevent running steps on the same host, it uses \"kubernetes.io/hostname\".", alias="nodeAntiAffinity") + __properties: ClassVar[List[str]] = ["nodeAntiAffinity"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1RetryAffinity from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1RetryAffinity from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeAntiAffinity": obj.get("nodeAntiAffinity") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py new file mode 100644 index 000000000000..86afb1d6b93e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + node_field_selector: Optional[StrictStr] = Field(default=None, alias="nodeFieldSelector") + parameters: Optional[List[StrictStr]] = None + restart_successful: Optional[StrictBool] = Field(default=None, alias="restartSuccessful") + uid: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "namespace", "nodeFieldSelector", "parameters", "restartSuccessful", "uid"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "nodeFieldSelector": obj.get("nodeFieldSelector"), + "parameters": obj.get("parameters"), + "restartSuccessful": obj.get("restartSuccessful"), + "uid": obj.get("uid") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_strategy.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_strategy.py new file mode 100644 index 000000000000..4935daf645f3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_retry_strategy.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
""" # noqa: E501


from __future__ import annotations
import pprint
import re # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from argo_workflows.models.io_argoproj_workflow_v1alpha1_backoff import IoArgoprojWorkflowV1alpha1Backoff
from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_affinity import IoArgoprojWorkflowV1alpha1RetryAffinity
from typing import Optional, Set
from typing_extensions import Self

# NOTE(review): auto-generated by OpenAPI Generator — do not edit manually;
# regenerate from the OpenAPI document instead.
class IoArgoprojWorkflowV1alpha1RetryStrategy(BaseModel):
    """
    RetryStrategy provides controls on how to retry a workflow step
    """ # noqa: E501
    affinity: Optional[IoArgoprojWorkflowV1alpha1RetryAffinity] = None
    backoff: Optional[IoArgoprojWorkflowV1alpha1Backoff] = None
    expression: Optional[StrictStr] = Field(default=None, description="Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored")
    # limit is serialized as a string by the API (intstr on the server side) — TODO confirm against the OpenAPI document
    limit: Optional[StrictStr] = None
    retry_policy: Optional[StrictStr] = Field(default=None, description="RetryPolicy is a policy of NodePhase statuses that will be retried", alias="retryPolicy")
    # Wire-format (alias) names of all declared fields, used by to_dict/from_dict.
    __properties: ClassVar[List[str]] = ["affinity", "backoff", "expression", "limit", "retryPolicy"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1RetryStrategy from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of affinity
        if self.affinity:
            _dict['affinity'] = self.affinity.to_dict()
        # override the default output from pydantic by calling `to_dict()` of backoff
        if self.backoff:
            _dict['backoff'] = self.backoff.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1RetryStrategy from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "affinity": IoArgoprojWorkflowV1alpha1RetryAffinity.from_dict(obj["affinity"]) if obj.get("affinity") is not None else None,
            "backoff": IoArgoprojWorkflowV1alpha1Backoff.from_dict(obj["backoff"]) if obj.get("backoff") is not None else None,
            "expression": obj.get("expression"),
            "limit": obj.get("limit"),
            "retryPolicy": obj.get("retryPolicy")
        })
        return _obj

# --- patch boundary: a new generated file begins here in the original diff:
#     sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_s3_artifact.py
#     (its module header is reconstructed at the start of the next block) ---
# coding: utf-8

"""
    Argo Workflows API

    Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/

    The version of the OpenAPI document: VERSION
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
""" # noqa: E501


from __future__ import annotations
import pprint
import re # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions
from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions
from argo_workflows.models.secret_key_selector import SecretKeySelector
from typing import Optional, Set
from typing_extensions import Self

# NOTE(review): auto-generated by OpenAPI Generator — do not edit manually;
# regenerate from the OpenAPI document instead.
class IoArgoprojWorkflowV1alpha1S3Artifact(BaseModel):
    """
    S3Artifact is the location of an S3 artifact
    """ # noqa: E501
    access_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="accessKeySecret")
    bucket: Optional[StrictStr] = Field(default=None, description="Bucket is the name of the bucket")
    ca_secret: Optional[SecretKeySelector] = Field(default=None, alias="caSecret")
    create_bucket_if_not_present: Optional[IoArgoprojWorkflowV1alpha1CreateS3BucketOptions] = Field(default=None, alias="createBucketIfNotPresent")
    encryption_options: Optional[IoArgoprojWorkflowV1alpha1S3EncryptionOptions] = Field(default=None, alias="encryptionOptions")
    endpoint: Optional[StrictStr] = Field(default=None, description="Endpoint is the hostname of the bucket endpoint")
    insecure: Optional[StrictBool] = Field(default=None, description="Insecure will connect to the service with TLS")
    key: Optional[StrictStr] = Field(default=None, description="Key is the key in the bucket where the artifact resides")
    region: Optional[StrictStr] = Field(default=None, description="Region contains the optional bucket region")
    role_arn: Optional[StrictStr] = Field(default=None, description="RoleARN is the Amazon Resource Name (ARN) of the role to assume.", alias="roleARN")
    secret_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="secretKeySecret")
    use_sdk_creds: Optional[StrictBool] = Field(default=None, description="UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", alias="useSDKCreds")
    # Wire-format (alias) names of all declared fields, used by to_dict/from_dict.
    __properties: ClassVar[List[str]] = ["accessKeySecret", "bucket", "caSecret", "createBucketIfNotPresent", "encryptionOptions", "endpoint", "insecure", "key", "region", "roleARN", "secretKeySecret", "useSDKCreds"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1S3Artifact from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of access_key_secret
        if self.access_key_secret:
            _dict['accessKeySecret'] = self.access_key_secret.to_dict()
        # override the default output from pydantic by calling `to_dict()` of ca_secret
        if self.ca_secret:
            _dict['caSecret'] = self.ca_secret.to_dict()
        # override the default output from pydantic by calling `to_dict()` of create_bucket_if_not_present
        if self.create_bucket_if_not_present:
            _dict['createBucketIfNotPresent'] = self.create_bucket_if_not_present.to_dict()
        # override the default output from pydantic by calling `to_dict()` of encryption_options
        if self.encryption_options:
            _dict['encryptionOptions'] = self.encryption_options.to_dict()
        # override the default output from pydantic by calling `to_dict()` of secret_key_secret
        if self.secret_key_secret:
            _dict['secretKeySecret'] = self.secret_key_secret.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1S3Artifact from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "accessKeySecret": SecretKeySelector.from_dict(obj["accessKeySecret"]) if obj.get("accessKeySecret") is not None else None,
            "bucket": obj.get("bucket"),
            "caSecret": SecretKeySelector.from_dict(obj["caSecret"]) if obj.get("caSecret") is not None else None,
            "createBucketIfNotPresent": IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.from_dict(obj["createBucketIfNotPresent"]) if obj.get("createBucketIfNotPresent") is not None else None,
            "encryptionOptions": IoArgoprojWorkflowV1alpha1S3EncryptionOptions.from_dict(obj["encryptionOptions"]) if obj.get("encryptionOptions") is not None else None,
            "endpoint": obj.get("endpoint"),
            "insecure": obj.get("insecure"),
            "key": obj.get("key"),
            "region": obj.get("region"),
            "roleARN": obj.get("roleARN"),
            "secretKeySecret": SecretKeySelector.from_dict(obj["secretKeySecret"]) if obj.get("secretKeySecret") is not None else None,
            "useSDKCreds": obj.get("useSDKCreds")
        })
        return _obj

# --- patch boundary: a new generated file begins here in the original diff:
#     sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_s3_artifact_repository.py
#     (its module header is reconstructed at the start of the next block) ---
# coding: utf-8

"""
    Argo Workflows API

    Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/

    The version of the OpenAPI document: VERSION
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
""" # noqa: E501


from __future__ import annotations
import pprint
import re # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions
from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions
from argo_workflows.models.secret_key_selector import SecretKeySelector
from typing import Optional, Set
from typing_extensions import Self

# NOTE(review): auto-generated by OpenAPI Generator — do not edit manually;
# regenerate from the OpenAPI document instead.
class IoArgoprojWorkflowV1alpha1S3ArtifactRepository(BaseModel):
    """
    S3ArtifactRepository defines the controller configuration for an S3 artifact repository
    """ # noqa: E501
    access_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="accessKeySecret")
    bucket: Optional[StrictStr] = Field(default=None, description="Bucket is the name of the bucket")
    ca_secret: Optional[SecretKeySelector] = Field(default=None, alias="caSecret")
    create_bucket_if_not_present: Optional[IoArgoprojWorkflowV1alpha1CreateS3BucketOptions] = Field(default=None, alias="createBucketIfNotPresent")
    encryption_options: Optional[IoArgoprojWorkflowV1alpha1S3EncryptionOptions] = Field(default=None, alias="encryptionOptions")
    endpoint: Optional[StrictStr] = Field(default=None, description="Endpoint is the hostname of the bucket endpoint")
    insecure: Optional[StrictBool] = Field(default=None, description="Insecure will connect to the service with TLS")
    key_format: Optional[StrictStr] = Field(default=None, description="KeyFormat defines the format of how to store keys and can reference workflow variables.", alias="keyFormat")
    key_prefix: Optional[StrictStr] = Field(default=None, description="KeyPrefix is prefix used as part of the bucket key in which the controller will store artifacts. DEPRECATED. Use KeyFormat instead", alias="keyPrefix")
    region: Optional[StrictStr] = Field(default=None, description="Region contains the optional bucket region")
    role_arn: Optional[StrictStr] = Field(default=None, description="RoleARN is the Amazon Resource Name (ARN) of the role to assume.", alias="roleARN")
    secret_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="secretKeySecret")
    use_sdk_creds: Optional[StrictBool] = Field(default=None, description="UseSDKCreds tells the driver to figure out credentials based on sdk defaults.", alias="useSDKCreds")
    # Wire-format (alias) names of all declared fields, used by to_dict/from_dict.
    __properties: ClassVar[List[str]] = ["accessKeySecret", "bucket", "caSecret", "createBucketIfNotPresent", "encryptionOptions", "endpoint", "insecure", "keyFormat", "keyPrefix", "region", "roleARN", "secretKeySecret", "useSDKCreds"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1S3ArtifactRepository from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of access_key_secret
        if self.access_key_secret:
            _dict['accessKeySecret'] = self.access_key_secret.to_dict()
        # override the default output from pydantic by calling `to_dict()` of ca_secret
        if self.ca_secret:
            _dict['caSecret'] = self.ca_secret.to_dict()
        # override the default output from pydantic by calling `to_dict()` of create_bucket_if_not_present
        if self.create_bucket_if_not_present:
            _dict['createBucketIfNotPresent'] = self.create_bucket_if_not_present.to_dict()
        # override the default output from pydantic by calling `to_dict()` of encryption_options
        if self.encryption_options:
            _dict['encryptionOptions'] = self.encryption_options.to_dict()
        # override the default output from pydantic by calling `to_dict()` of secret_key_secret
        if self.secret_key_secret:
            _dict['secretKeySecret'] = self.secret_key_secret.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1S3ArtifactRepository from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "accessKeySecret": SecretKeySelector.from_dict(obj["accessKeySecret"]) if obj.get("accessKeySecret") is not None else None,
            "bucket": obj.get("bucket"),
            "caSecret": SecretKeySelector.from_dict(obj["caSecret"]) if obj.get("caSecret") is not None else None,
            "createBucketIfNotPresent": IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.from_dict(obj["createBucketIfNotPresent"]) if obj.get("createBucketIfNotPresent") is not None else None,
            "encryptionOptions": IoArgoprojWorkflowV1alpha1S3EncryptionOptions.from_dict(obj["encryptionOptions"]) if obj.get("encryptionOptions") is not None else None,
            "endpoint": obj.get("endpoint"),
            "insecure": obj.get("insecure"),
            "keyFormat": obj.get("keyFormat"),
            "keyPrefix": obj.get("keyPrefix"),
            "region": obj.get("region"),
            "roleARN": obj.get("roleARN"),
            "secretKeySecret": SecretKeySelector.from_dict(obj["secretKeySecret"]) if obj.get("secretKeySecret") is not None else None,
            "useSDKCreds": obj.get("useSDKCreds")
        })
        return _obj

# --- patch boundary: a new generated file begins here in the original diff:
#     sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_s3_encryption_options.py ---
# coding: utf-8

"""
    Argo Workflows API

    Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/

    The version of the OpenAPI document: VERSION
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
""" # noqa: E501


from __future__ import annotations
import pprint
import re # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from argo_workflows.models.secret_key_selector import SecretKeySelector
from typing import Optional, Set
from typing_extensions import Self

# NOTE(review): auto-generated by OpenAPI Generator — do not edit manually;
# regenerate from the OpenAPI document instead.
class IoArgoprojWorkflowV1alpha1S3EncryptionOptions(BaseModel):
    """
    S3EncryptionOptions used to determine encryption options during s3 operations
    """ # noqa: E501
    enable_encryption: Optional[StrictBool] = Field(default=None, description="EnableEncryption tells the driver to encrypt objects if set to true. If kmsKeyId and serverSideCustomerKeySecret are not set, SSE-S3 will be used", alias="enableEncryption")
    kms_encryption_context: Optional[StrictStr] = Field(default=None, description="KmsEncryptionContext is a json blob that contains an encryption context. See https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context for more information", alias="kmsEncryptionContext")
    kms_key_id: Optional[StrictStr] = Field(default=None, description="KMSKeyId tells the driver to encrypt the object using the specified KMS Key.", alias="kmsKeyId")
    server_side_customer_key_secret: Optional[SecretKeySelector] = Field(default=None, alias="serverSideCustomerKeySecret")
    # Wire-format (alias) names of all declared fields, used by to_dict/from_dict.
    __properties: ClassVar[List[str]] = ["enableEncryption", "kmsEncryptionContext", "kmsKeyId", "serverSideCustomerKeySecret"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1S3EncryptionOptions from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of server_side_customer_key_secret
        if self.server_side_customer_key_secret:
            _dict['serverSideCustomerKeySecret'] = self.server_side_customer_key_secret.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1S3EncryptionOptions from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "enableEncryption": obj.get("enableEncryption"),
            "kmsEncryptionContext": obj.get("kmsEncryptionContext"),
            "kmsKeyId": obj.get("kmsKeyId"),
            "serverSideCustomerKeySecret": SecretKeySelector.from_dict(obj["serverSideCustomerKeySecret"]) if obj.get("serverSideCustomerKeySecret") is not None else None
        })
        return _obj

# --- patch boundary: a new generated file begins here in the original diff:
#     sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_script_template.py
#     (its module header is reconstructed at the start of the next block) ---
# coding: utf-8

"""
    Argo Workflows API

    Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/

    The version of the OpenAPI document: VERSION
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
""" # noqa: E501


from __future__ import annotations
import pprint
import re # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from argo_workflows.models.container_port import ContainerPort
from argo_workflows.models.env_from_source import EnvFromSource
from argo_workflows.models.env_var import EnvVar
from argo_workflows.models.lifecycle import Lifecycle
from argo_workflows.models.probe import Probe
from argo_workflows.models.resource_requirements import ResourceRequirements
from argo_workflows.models.security_context import SecurityContext
from argo_workflows.models.volume_device import VolumeDevice
from argo_workflows.models.volume_mount import VolumeMount
from typing import Optional, Set
from typing_extensions import Self

# NOTE(review): auto-generated by OpenAPI Generator — do not edit manually;
# regenerate from the OpenAPI document instead. Mirrors the Kubernetes
# Container spec plus the Argo-specific required `source` field.
class IoArgoprojWorkflowV1alpha1ScriptTemplate(BaseModel):
    """
    ScriptTemplate is a template subtype to enable scripting through code steps
    """ # noqa: E501
    args: Optional[List[StrictStr]] = Field(default=None, description="Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell")
    command: Optional[List[StrictStr]] = Field(default=None, description="Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell")
    env: Optional[List[EnvVar]] = Field(default=None, description="List of environment variables to set in the container. Cannot be updated.")
    env_from: Optional[List[EnvFromSource]] = Field(default=None, description="List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.", alias="envFrom")
    # `image` and `source` are the only required (non-Optional) fields of this model.
    image: StrictStr = Field(description="Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.")
    image_pull_policy: Optional[StrictStr] = Field(default=None, description="Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images", alias="imagePullPolicy")
    lifecycle: Optional[Lifecycle] = None
    liveness_probe: Optional[Probe] = Field(default=None, alias="livenessProbe")
    name: Optional[StrictStr] = Field(default=None, description="Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.")
    ports: Optional[List[ContainerPort]] = Field(default=None, description="List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.")
    readiness_probe: Optional[Probe] = Field(default=None, alias="readinessProbe")
    resources: Optional[ResourceRequirements] = None
    security_context: Optional[SecurityContext] = Field(default=None, alias="securityContext")
    source: StrictStr = Field(description="Source contains the source code of the script to execute")
    startup_probe: Optional[Probe] = Field(default=None, alias="startupProbe")
    stdin: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.")
    stdin_once: Optional[StrictBool] = Field(default=None, description="Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false", alias="stdinOnce")
    termination_message_path: Optional[StrictStr] = Field(default=None, description="Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.", alias="terminationMessagePath")
    termination_message_policy: Optional[StrictStr] = Field(default=None, description="Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.", alias="terminationMessagePolicy")
    tty: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false.")
    volume_devices: Optional[List[VolumeDevice]] = Field(default=None, description="volumeDevices is the list of block devices to be used by the container.", alias="volumeDevices")
    volume_mounts: Optional[List[VolumeMount]] = Field(default=None, description="Pod volumes to mount into the container's filesystem. Cannot be updated.", alias="volumeMounts")
    working_dir: Optional[StrictStr] = Field(default=None, description="Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.", alias="workingDir")
    # Wire-format (alias) names of all declared fields, used by to_dict/from_dict.
    __properties: ClassVar[List[str]] = ["args", "command", "env", "envFrom", "image", "imagePullPolicy", "lifecycle", "livenessProbe", "name", "ports", "readinessProbe", "resources", "securityContext", "source", "startupProbe", "stdin", "stdinOnce", "terminationMessagePath", "terminationMessagePolicy", "tty", "volumeDevices", "volumeMounts", "workingDir"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1ScriptTemplate from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of each item in env (list)
        _items = []
        if self.env:
            for _item in self.env:
                if _item:
                    _items.append(_item.to_dict())
            _dict['env'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in env_from (list)
        _items = []
        if self.env_from:
            for _item in self.env_from:
                if _item:
                    _items.append(_item.to_dict())
            _dict['envFrom'] = _items
        # override the default output from pydantic by calling `to_dict()` of lifecycle
        if self.lifecycle:
            _dict['lifecycle'] = self.lifecycle.to_dict()
        # override the default output from pydantic by calling `to_dict()` of liveness_probe
        if self.liveness_probe:
            _dict['livenessProbe'] = self.liveness_probe.to_dict()
        # override the default output from pydantic by calling `to_dict()` of each item in ports (list)
        _items = []
        if self.ports:
            for _item in self.ports:
                if _item:
                    _items.append(_item.to_dict())
            _dict['ports'] = _items
        # override the default output from pydantic by calling `to_dict()` of readiness_probe
        if self.readiness_probe:
            _dict['readinessProbe'] = self.readiness_probe.to_dict()
        # override the default output from pydantic by calling `to_dict()` of resources
        if self.resources:
            _dict['resources'] = self.resources.to_dict()
        # override the default output from pydantic by calling `to_dict()` of security_context
        if self.security_context:
            _dict['securityContext'] = self.security_context.to_dict()
        # override the default output from pydantic by calling `to_dict()` of startup_probe
        if self.startup_probe:
            _dict['startupProbe'] = self.startup_probe.to_dict()
        # override the default output from pydantic by calling `to_dict()` of each item in volume_devices (list)
        _items = []
        if self.volume_devices:
            for _item in self.volume_devices:
                if _item:
                    _items.append(_item.to_dict())
            _dict['volumeDevices'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in volume_mounts (list)
        _items = []
        if self.volume_mounts:
            for _item in self.volume_mounts:
                if _item:
                    _items.append(_item.to_dict())
            _dict['volumeMounts'] = _items
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1ScriptTemplate from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "args": obj.get("args"),
            "command": obj.get("command"),
            "env": [EnvVar.from_dict(_item) for _item in obj["env"]] if obj.get("env") is not None else None,
            "envFrom": [EnvFromSource.from_dict(_item) for _item in obj["envFrom"]] if obj.get("envFrom") is not None else None,
            "image": obj.get("image"),
            "imagePullPolicy": obj.get("imagePullPolicy"),
            "lifecycle": Lifecycle.from_dict(obj["lifecycle"]) if obj.get("lifecycle") is not None else None,
            "livenessProbe": Probe.from_dict(obj["livenessProbe"]) if obj.get("livenessProbe") is not None else None,
            "name": obj.get("name"),
            "ports": [ContainerPort.from_dict(_item) for _item in obj["ports"]] if obj.get("ports") is not None else None,
            "readinessProbe": Probe.from_dict(obj["readinessProbe"]) if obj.get("readinessProbe") is not None else None,
            "resources": ResourceRequirements.from_dict(obj["resources"]) if obj.get("resources") is not None else None,
            "securityContext": SecurityContext.from_dict(obj["securityContext"]) if obj.get("securityContext") is not None else None,
            "source": obj.get("source"),
            "startupProbe": Probe.from_dict(obj["startupProbe"]) if obj.get("startupProbe") is not None else None,
            "stdin": obj.get("stdin"),
            "stdinOnce": obj.get("stdinOnce"),
            "terminationMessagePath": obj.get("terminationMessagePath"),
            "terminationMessagePolicy": obj.get("terminationMessagePolicy"),
            "tty": obj.get("tty"),
            "volumeDevices": [VolumeDevice.from_dict(_item) for _item in obj["volumeDevices"]] if obj.get("volumeDevices") is not None else None,
            "volumeMounts": [VolumeMount.from_dict(_item) for _item in obj["volumeMounts"]] if obj.get("volumeMounts") is not None else None,
            "workingDir": obj.get("workingDir")
        })
        return _obj

# --- patch boundary: a new generated file begins here in the original diff:
#     sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_holding.py
#     (its header and the IoArgoprojWorkflowV1alpha1SemaphoreHolding model continue
#     past the end of this chunk and are left untouched) ---
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1SemaphoreHolding(BaseModel): + """ + IoArgoprojWorkflowV1alpha1SemaphoreHolding + """ # noqa: E501 + holders: Optional[List[StrictStr]] = Field(default=None, description="Holders stores the list of current holder names in the io.argoproj.workflow.v1alpha1.") + semaphore: Optional[StrictStr] = Field(default=None, description="Semaphore stores the semaphore name.") + __properties: ClassVar[List[str]] = ["holders", "semaphore"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SemaphoreHolding from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SemaphoreHolding from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "holders": obj.get("holders"), + "semaphore": obj.get("semaphore") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_ref.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_ref.py new file mode 100644 index 000000000000..a1d6ffd78f26 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_ref.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1SemaphoreRef(BaseModel): + """ + SemaphoreRef is a reference of Semaphore + """ # noqa: E501 + config_map_key_ref: Optional[ConfigMapKeySelector] = Field(default=None, alias="configMapKeyRef") + namespace: Optional[StrictStr] = Field(default=None, description="Namespace is the namespace of the configmap, default: [namespace of workflow]") + __properties: ClassVar[List[str]] = ["configMapKeyRef", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SemaphoreRef from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config_map_key_ref + if self.config_map_key_ref: + _dict['configMapKeyRef'] = self.config_map_key_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SemaphoreRef from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMapKeyRef": ConfigMapKeySelector.from_dict(obj["configMapKeyRef"]) if obj.get("configMapKeyRef") is not None else None, + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_status.py new file mode 100644 index 000000000000..7656c49cfdb1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_semaphore_status.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_holding import IoArgoprojWorkflowV1alpha1SemaphoreHolding +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1SemaphoreStatus(BaseModel): + """ + IoArgoprojWorkflowV1alpha1SemaphoreStatus + """ # noqa: E501 + holding: Optional[List[IoArgoprojWorkflowV1alpha1SemaphoreHolding]] = Field(default=None, description="Holding stores the list of resource acquired synchronization lock for workflows.") + waiting: Optional[List[IoArgoprojWorkflowV1alpha1SemaphoreHolding]] = Field(default=None, description="Waiting indicates the list of current synchronization lock holders.") + __properties: ClassVar[List[str]] = ["holding", "waiting"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SemaphoreStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in holding (list) + _items = [] + if self.holding: + for _item in self.holding: + if _item: + _items.append(_item.to_dict()) + _dict['holding'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in waiting (list) + _items = [] + if self.waiting: + for _item in self.waiting: + if _item: + _items.append(_item.to_dict()) + _dict['waiting'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SemaphoreStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "holding": [IoArgoprojWorkflowV1alpha1SemaphoreHolding.from_dict(_item) for _item in obj["holding"]] if obj.get("holding") is not None else None, + "waiting": [IoArgoprojWorkflowV1alpha1SemaphoreHolding.from_dict(_item) for _item in obj["waiting"]] if obj.get("waiting") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_sequence.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_sequence.py new file mode 100644 index 000000000000..d7b9d4f130d6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_sequence.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Sequence(BaseModel): + """ + Sequence expands a workflow step into numeric range + """ # noqa: E501 + count: Optional[StrictStr] = None + end: Optional[StrictStr] = None + format: Optional[StrictStr] = Field(default=None, description="Format is a printf format string to format the value in the sequence") + start: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["count", "end", "format", "start"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Sequence from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Sequence from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "count": obj.get("count"), + "end": obj.get("end"), + "format": obj.get("format"), + "start": obj.get("start") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_stop_strategy.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_stop_strategy.py new file mode 100644 index 000000000000..9b3f9724ada0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_stop_strategy.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1StopStrategy(BaseModel): + """ + StopStrategy defines if the cron workflow will stop being triggered once a certain condition has been reached, involving a number of runs of the workflow + """ # noqa: E501 + condition: StrictStr = Field(description="Condition defines a condition that stops scheduling workflows when evaluates to true. Use the keywords `failed` or `succeeded` to access the number of failed or successful child workflows.") + __properties: ClassVar[List[str]] = ["condition"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1StopStrategy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1StopStrategy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "condition": obj.get("condition") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit.py new file mode 100644 index 000000000000..e4d74a47823b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Submit(BaseModel): + """ + IoArgoprojWorkflowV1alpha1Submit + """ # noqa: E501 + arguments: Optional[IoArgoprojWorkflowV1alpha1Arguments] = None + metadata: Optional[ObjectMeta] = None + workflow_template_ref: IoArgoprojWorkflowV1alpha1WorkflowTemplateRef = Field(alias="workflowTemplateRef") + __properties: ClassVar[List[str]] = ["arguments", "metadata", "workflowTemplateRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Submit from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of arguments + if self.arguments: + _dict['arguments'] = self.arguments.to_dict() + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of workflow_template_ref + if self.workflow_template_ref: + _dict['workflowTemplateRef'] = self.workflow_template_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Submit from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "arguments": IoArgoprojWorkflowV1alpha1Arguments.from_dict(obj["arguments"]) if obj.get("arguments") is not None else None, + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "workflowTemplateRef": IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.from_dict(obj["workflowTemplateRef"]) if obj.get("workflowTemplateRef") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit_opts.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit_opts.py new file mode 100644 index 000000000000..dfc8d8748743 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_submit_opts.py @@ -0,0 +1,113 @@ +# coding: utf-8 + 
+""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.owner_reference import OwnerReference +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1SubmitOpts(BaseModel): + """ + SubmitOpts are workflow submission options + """ # noqa: E501 + annotations: Optional[StrictStr] = Field(default=None, description="Annotations adds to metadata.labels") + dry_run: Optional[StrictBool] = Field(default=None, description="DryRun validates the workflow on the client-side without creating it. 
This option is not supported in API", alias="dryRun") + entry_point: Optional[StrictStr] = Field(default=None, description="Entrypoint overrides spec.entrypoint", alias="entryPoint") + generate_name: Optional[StrictStr] = Field(default=None, description="GenerateName overrides metadata.generateName", alias="generateName") + labels: Optional[StrictStr] = Field(default=None, description="Labels adds to metadata.labels") + name: Optional[StrictStr] = Field(default=None, description="Name overrides metadata.name") + owner_reference: Optional[OwnerReference] = Field(default=None, alias="ownerReference") + parameters: Optional[List[StrictStr]] = Field(default=None, description="Parameters passes input parameters to workflow") + pod_priority_class_name: Optional[StrictStr] = Field(default=None, description="Set the podPriorityClassName of the workflow", alias="podPriorityClassName") + priority: Optional[StrictInt] = Field(default=None, description="Priority is used if controller is configured to process limited number of workflows in parallel, higher priority workflows are processed first.") + server_dry_run: Optional[StrictBool] = Field(default=None, description="ServerDryRun validates the workflow on the server-side without creating it", alias="serverDryRun") + service_account: Optional[StrictStr] = Field(default=None, description="ServiceAccount runs all pods in the workflow using specified ServiceAccount.", alias="serviceAccount") + __properties: ClassVar[List[str]] = ["annotations", "dryRun", "entryPoint", "generateName", "labels", "name", "ownerReference", "parameters", "podPriorityClassName", "priority", "serverDryRun", "serviceAccount"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON 
representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SubmitOpts from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of owner_reference + if self.owner_reference: + _dict['ownerReference'] = self.owner_reference.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SubmitOpts from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "annotations": obj.get("annotations"), + "dryRun": obj.get("dryRun"), + "entryPoint": obj.get("entryPoint"), + "generateName": obj.get("generateName"), + "labels": obj.get("labels"), + "name": obj.get("name"), + "ownerReference": OwnerReference.from_dict(obj["ownerReference"]) if obj.get("ownerReference") is not None else None, + "parameters": obj.get("parameters"), + "podPriorityClassName": obj.get("podPriorityClassName"), + "priority": obj.get("priority"), + "serverDryRun": obj.get("serverDryRun"), + "serviceAccount": obj.get("serviceAccount") + }) + return _obj + + diff --git 
a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_suspend_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_suspend_template.py new file mode 100644 index 000000000000..3726a9226610 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_suspend_template.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1SuspendTemplate(BaseModel): + """ + SuspendTemplate is a template subtype to suspend a workflow at a predetermined point in time + """ # noqa: E501 + duration: Optional[StrictStr] = Field(default=None, description="Duration is the seconds to wait before automatically resuming a template. Must be a string. Default unit is seconds. 
Could also be a Duration, e.g.: \"2m\", \"6h\"") + __properties: ClassVar[List[str]] = ["duration"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SuspendTemplate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SuspendTemplate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "duration": obj.get("duration") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization.py new file mode 100644 index 000000000000..4cacd84c61c3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex import IoArgoprojWorkflowV1alpha1Mutex +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_ref import IoArgoprojWorkflowV1alpha1SemaphoreRef +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Synchronization(BaseModel): + """ + Synchronization holds synchronization lock configuration + """ # noqa: E501 + mutex: Optional[IoArgoprojWorkflowV1alpha1Mutex] = None + semaphore: Optional[IoArgoprojWorkflowV1alpha1SemaphoreRef] = None + __properties: ClassVar[List[str]] = ["mutex", "semaphore"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Synchronization from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of mutex + if self.mutex: + _dict['mutex'] = self.mutex.to_dict() + # override the default output from pydantic by calling `to_dict()` of semaphore + if self.semaphore: + _dict['semaphore'] = self.semaphore.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Synchronization from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "mutex": IoArgoprojWorkflowV1alpha1Mutex.from_dict(obj["mutex"]) if obj.get("mutex") is not None else None, + "semaphore": IoArgoprojWorkflowV1alpha1SemaphoreRef.from_dict(obj["semaphore"]) if obj.get("semaphore") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization_status.py new file mode 100644 index 000000000000..c0fde3abeeda --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_synchronization_status.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex_status import IoArgoprojWorkflowV1alpha1MutexStatus +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_status import IoArgoprojWorkflowV1alpha1SemaphoreStatus +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1SynchronizationStatus(BaseModel): + """ + SynchronizationStatus stores the status of semaphore and mutex. + """ # noqa: E501 + mutex: Optional[IoArgoprojWorkflowV1alpha1MutexStatus] = None + semaphore: Optional[IoArgoprojWorkflowV1alpha1SemaphoreStatus] = None + __properties: ClassVar[List[str]] = ["mutex", "semaphore"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SynchronizationStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of mutex + if self.mutex: + _dict['mutex'] = self.mutex.to_dict() + # override the default output from pydantic by calling `to_dict()` of semaphore + if self.semaphore: + _dict['semaphore'] = self.semaphore.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1SynchronizationStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "mutex": IoArgoprojWorkflowV1alpha1MutexStatus.from_dict(obj["mutex"]) if obj.get("mutex") is not None else None, + "semaphore": IoArgoprojWorkflowV1alpha1SemaphoreStatus.from_dict(obj["semaphore"]) if obj.get("semaphore") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_tar_strategy.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_tar_strategy.py new file mode 100644 index 000000000000..0192ed729708 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_tar_strategy.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1TarStrategy(BaseModel): + """ + TarStrategy will tar and gzip the file or directory when saving + """ # noqa: E501 + compression_level: Optional[StrictInt] = Field(default=None, description="CompressionLevel specifies the gzip compression level to use for the artifact. Defaults to gzip.DefaultCompression.", alias="compressionLevel") + __properties: ClassVar[List[str]] = ["compressionLevel"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TarStrategy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TarStrategy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "compressionLevel": obj.get("compressionLevel") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template.py new file mode 100644 index 000000000000..6fe2476ab39f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template.py @@ -0,0 +1,288 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.affinity import Affinity +from argo_workflows.models.container import Container +from argo_workflows.models.host_alias import HostAlias +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_template import IoArgoprojWorkflowV1alpha1ContainerSetTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data import IoArgoprojWorkflowV1alpha1Data +from argo_workflows.models.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http import IoArgoprojWorkflowV1alpha1HTTP +from argo_workflows.models.io_argoproj_workflow_v1alpha1_inputs import IoArgoprojWorkflowV1alpha1Inputs +from argo_workflows.models.io_argoproj_workflow_v1alpha1_memoize import IoArgoprojWorkflowV1alpha1Memoize +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metrics import IoArgoprojWorkflowV1alpha1Metrics +from argo_workflows.models.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parallel_steps import IoArgoprojWorkflowV1alpha1ParallelSteps +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resource_template import IoArgoprojWorkflowV1alpha1ResourceTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_script_template import 
IoArgoprojWorkflowV1alpha1ScriptTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_suspend_template import IoArgoprojWorkflowV1alpha1SuspendTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization +from argo_workflows.models.io_argoproj_workflow_v1alpha1_user_container import IoArgoprojWorkflowV1alpha1UserContainer +from argo_workflows.models.pod_security_context import PodSecurityContext +from argo_workflows.models.toleration import Toleration +from argo_workflows.models.volume import Volume +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Template(BaseModel): + """ + Template is a reusable and composable unit of execution in a workflow + """ # noqa: E501 + active_deadline_seconds: Optional[StrictStr] = Field(default=None, alias="activeDeadlineSeconds") + affinity: Optional[Affinity] = None + archive_location: Optional[IoArgoprojWorkflowV1alpha1ArtifactLocation] = Field(default=None, alias="archiveLocation") + automount_service_account_token: Optional[StrictBool] = Field(default=None, description="AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. 
ServiceAccountName of ExecutorConfig must be specified if this value is false.", alias="automountServiceAccountToken") + container: Optional[Container] = None + container_set: Optional[IoArgoprojWorkflowV1alpha1ContainerSetTemplate] = Field(default=None, alias="containerSet") + daemon: Optional[StrictBool] = Field(default=None, description="Daemon will allow a workflow to proceed to the next step so long as the container reaches readiness") + dag: Optional[IoArgoprojWorkflowV1alpha1DAGTemplate] = None + data: Optional[IoArgoprojWorkflowV1alpha1Data] = None + executor: Optional[IoArgoprojWorkflowV1alpha1ExecutorConfig] = None + fail_fast: Optional[StrictBool] = Field(default=None, description="FailFast, if specified, will fail this template if any of its child pods has failed. This is useful for when this template is expanded with `withItems`, etc.", alias="failFast") + host_aliases: Optional[List[HostAlias]] = Field(default=None, description="HostAliases is an optional list of hosts and IPs that will be injected into the pod spec", alias="hostAliases") + http: Optional[IoArgoprojWorkflowV1alpha1HTTP] = None + init_containers: Optional[List[IoArgoprojWorkflowV1alpha1UserContainer]] = Field(default=None, description="InitContainers is a list of containers which run before the main container.", alias="initContainers") + inputs: Optional[IoArgoprojWorkflowV1alpha1Inputs] = None + memoize: Optional[IoArgoprojWorkflowV1alpha1Memoize] = None + metadata: Optional[IoArgoprojWorkflowV1alpha1Metadata] = None + metrics: Optional[IoArgoprojWorkflowV1alpha1Metrics] = None + name: Optional[StrictStr] = Field(default=None, description="Name is the name of the template") + node_selector: Optional[Dict[str, StrictStr]] = Field(default=None, description="NodeSelector is a selector to schedule this step of the workflow to be run on the selected node(s). 
Overrides the selector set at the workflow level.", alias="nodeSelector") + outputs: Optional[IoArgoprojWorkflowV1alpha1Outputs] = None + parallelism: Optional[StrictInt] = Field(default=None, description="Parallelism limits the max total parallel pods that can execute at the same time within the boundaries of this template invocation. If additional steps/dag templates are invoked, the pods created by those templates will not be counted towards this total.") + plugin: Optional[Dict[str, Any]] = Field(default=None, description="Plugin is an Object with exactly one key") + pod_spec_patch: Optional[StrictStr] = Field(default=None, description="PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).", alias="podSpecPatch") + priority: Optional[StrictInt] = Field(default=None, description="Priority to apply to workflow pods.") + priority_class_name: Optional[StrictStr] = Field(default=None, description="PriorityClassName to apply to workflow pods.", alias="priorityClassName") + resource: Optional[IoArgoprojWorkflowV1alpha1ResourceTemplate] = None + retry_strategy: Optional[IoArgoprojWorkflowV1alpha1RetryStrategy] = Field(default=None, alias="retryStrategy") + scheduler_name: Optional[StrictStr] = Field(default=None, description="If specified, the pod will be dispatched by specified scheduler. Or it will be dispatched by workflow scope scheduler if specified. 
If neither specified, the pod will be dispatched by default scheduler.", alias="schedulerName") + script: Optional[IoArgoprojWorkflowV1alpha1ScriptTemplate] = None + security_context: Optional[PodSecurityContext] = Field(default=None, alias="securityContext") + service_account_name: Optional[StrictStr] = Field(default=None, description="ServiceAccountName to apply to workflow pods", alias="serviceAccountName") + sidecars: Optional[List[IoArgoprojWorkflowV1alpha1UserContainer]] = Field(default=None, description="Sidecars is a list of containers which run alongside the main container Sidecars are automatically killed when the main container completes") + steps: Optional[List[IoArgoprojWorkflowV1alpha1ParallelSteps]] = Field(default=None, description="Steps define a series of sequential/parallel workflow steps") + suspend: Optional[IoArgoprojWorkflowV1alpha1SuspendTemplate] = None + synchronization: Optional[IoArgoprojWorkflowV1alpha1Synchronization] = None + timeout: Optional[StrictStr] = Field(default=None, description="Timeout allows to set the total node execution timeout duration counting from the node's start time. This duration also includes time in which the node spends in Pending state. 
This duration may not be applied to Step or DAG templates.") + tolerations: Optional[List[Toleration]] = Field(default=None, description="Tolerations to apply to workflow pods.") + volumes: Optional[List[Volume]] = Field(default=None, description="Volumes is a list of volumes that can be mounted by containers in a template.") + __properties: ClassVar[List[str]] = ["activeDeadlineSeconds", "affinity", "archiveLocation", "automountServiceAccountToken", "container", "containerSet", "daemon", "dag", "data", "executor", "failFast", "hostAliases", "http", "initContainers", "inputs", "memoize", "metadata", "metrics", "name", "nodeSelector", "outputs", "parallelism", "plugin", "podSpecPatch", "priority", "priorityClassName", "resource", "retryStrategy", "schedulerName", "script", "securityContext", "serviceAccountName", "sidecars", "steps", "suspend", "synchronization", "timeout", "tolerations", "volumes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Template from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of affinity + if self.affinity: + _dict['affinity'] = self.affinity.to_dict() + # override the default output from pydantic by calling `to_dict()` of archive_location + if self.archive_location: + _dict['archiveLocation'] = self.archive_location.to_dict() + # override the default output from pydantic by calling `to_dict()` of container + if self.container: + _dict['container'] = self.container.to_dict() + # override the default output from pydantic by calling `to_dict()` of container_set + if self.container_set: + _dict['containerSet'] = self.container_set.to_dict() + # override the default output from pydantic by calling `to_dict()` of dag + if self.dag: + _dict['dag'] = self.dag.to_dict() + # override the default output from pydantic by calling `to_dict()` of data + if self.data: + _dict['data'] = self.data.to_dict() + # override the default output from pydantic by calling `to_dict()` of executor + if self.executor: + _dict['executor'] = self.executor.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in host_aliases (list) + _items = [] + if self.host_aliases: + for _item in self.host_aliases: + if _item: + _items.append(_item.to_dict()) + _dict['hostAliases'] = _items + # override the default output from pydantic by calling `to_dict()` of http + if self.http: + _dict['http'] = self.http.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in init_containers (list) + _items = [] + if self.init_containers: + for _item in self.init_containers: + if _item: + _items.append(_item.to_dict()) + _dict['initContainers'] = _items + # override the default output from pydantic by calling `to_dict()` of inputs + if self.inputs: + _dict['inputs'] = self.inputs.to_dict() + # override the 
default output from pydantic by calling `to_dict()` of memoize + if self.memoize: + _dict['memoize'] = self.memoize.to_dict() + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of metrics + if self.metrics: + _dict['metrics'] = self.metrics.to_dict() + # override the default output from pydantic by calling `to_dict()` of outputs + if self.outputs: + _dict['outputs'] = self.outputs.to_dict() + # override the default output from pydantic by calling `to_dict()` of resource + if self.resource: + _dict['resource'] = self.resource.to_dict() + # override the default output from pydantic by calling `to_dict()` of retry_strategy + if self.retry_strategy: + _dict['retryStrategy'] = self.retry_strategy.to_dict() + # override the default output from pydantic by calling `to_dict()` of script + if self.script: + _dict['script'] = self.script.to_dict() + # override the default output from pydantic by calling `to_dict()` of security_context + if self.security_context: + _dict['securityContext'] = self.security_context.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in sidecars (list) + _items = [] + if self.sidecars: + for _item in self.sidecars: + if _item: + _items.append(_item.to_dict()) + _dict['sidecars'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in steps (list) + _items = [] + if self.steps: + for _item in self.steps: + if _item: + _items.append(_item.to_dict()) + _dict['steps'] = _items + # override the default output from pydantic by calling `to_dict()` of suspend + if self.suspend: + _dict['suspend'] = self.suspend.to_dict() + # override the default output from pydantic by calling `to_dict()` of synchronization + if self.synchronization: + _dict['synchronization'] = self.synchronization.to_dict() + # override the 
default output from pydantic by calling `to_dict()` of each item in tolerations (list) + _items = [] + if self.tolerations: + for _item in self.tolerations: + if _item: + _items.append(_item.to_dict()) + _dict['tolerations'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in volumes (list) + _items = [] + if self.volumes: + for _item in self.volumes: + if _item: + _items.append(_item.to_dict()) + _dict['volumes'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Template from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "activeDeadlineSeconds": obj.get("activeDeadlineSeconds"), + "affinity": Affinity.from_dict(obj["affinity"]) if obj.get("affinity") is not None else None, + "archiveLocation": IoArgoprojWorkflowV1alpha1ArtifactLocation.from_dict(obj["archiveLocation"]) if obj.get("archiveLocation") is not None else None, + "automountServiceAccountToken": obj.get("automountServiceAccountToken"), + "container": Container.from_dict(obj["container"]) if obj.get("container") is not None else None, + "containerSet": IoArgoprojWorkflowV1alpha1ContainerSetTemplate.from_dict(obj["containerSet"]) if obj.get("containerSet") is not None else None, + "daemon": obj.get("daemon"), + "dag": IoArgoprojWorkflowV1alpha1DAGTemplate.from_dict(obj["dag"]) if obj.get("dag") is not None else None, + "data": IoArgoprojWorkflowV1alpha1Data.from_dict(obj["data"]) if obj.get("data") is not None else None, + "executor": IoArgoprojWorkflowV1alpha1ExecutorConfig.from_dict(obj["executor"]) if obj.get("executor") is not None else None, + "failFast": obj.get("failFast"), + "hostAliases": [HostAlias.from_dict(_item) for _item in obj["hostAliases"]] if obj.get("hostAliases") is not None else None, + "http": 
IoArgoprojWorkflowV1alpha1HTTP.from_dict(obj["http"]) if obj.get("http") is not None else None, + "initContainers": [IoArgoprojWorkflowV1alpha1UserContainer.from_dict(_item) for _item in obj["initContainers"]] if obj.get("initContainers") is not None else None, + "inputs": IoArgoprojWorkflowV1alpha1Inputs.from_dict(obj["inputs"]) if obj.get("inputs") is not None else None, + "memoize": IoArgoprojWorkflowV1alpha1Memoize.from_dict(obj["memoize"]) if obj.get("memoize") is not None else None, + "metadata": IoArgoprojWorkflowV1alpha1Metadata.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "metrics": IoArgoprojWorkflowV1alpha1Metrics.from_dict(obj["metrics"]) if obj.get("metrics") is not None else None, + "name": obj.get("name"), + "nodeSelector": obj.get("nodeSelector"), + "outputs": IoArgoprojWorkflowV1alpha1Outputs.from_dict(obj["outputs"]) if obj.get("outputs") is not None else None, + "parallelism": obj.get("parallelism"), + "plugin": obj.get("plugin"), + "podSpecPatch": obj.get("podSpecPatch"), + "priority": obj.get("priority"), + "priorityClassName": obj.get("priorityClassName"), + "resource": IoArgoprojWorkflowV1alpha1ResourceTemplate.from_dict(obj["resource"]) if obj.get("resource") is not None else None, + "retryStrategy": IoArgoprojWorkflowV1alpha1RetryStrategy.from_dict(obj["retryStrategy"]) if obj.get("retryStrategy") is not None else None, + "schedulerName": obj.get("schedulerName"), + "script": IoArgoprojWorkflowV1alpha1ScriptTemplate.from_dict(obj["script"]) if obj.get("script") is not None else None, + "securityContext": PodSecurityContext.from_dict(obj["securityContext"]) if obj.get("securityContext") is not None else None, + "serviceAccountName": obj.get("serviceAccountName"), + "sidecars": [IoArgoprojWorkflowV1alpha1UserContainer.from_dict(_item) for _item in obj["sidecars"]] if obj.get("sidecars") is not None else None, + "steps": [IoArgoprojWorkflowV1alpha1ParallelSteps.from_dict(_item) for _item in obj["steps"]] if 
obj.get("steps") is not None else None, + "suspend": IoArgoprojWorkflowV1alpha1SuspendTemplate.from_dict(obj["suspend"]) if obj.get("suspend") is not None else None, + "synchronization": IoArgoprojWorkflowV1alpha1Synchronization.from_dict(obj["synchronization"]) if obj.get("synchronization") is not None else None, + "timeout": obj.get("timeout"), + "tolerations": [Toleration.from_dict(_item) for _item in obj["tolerations"]] if obj.get("tolerations") is not None else None, + "volumes": [Volume.from_dict(_item) for _item in obj["volumes"]] if obj.get("volumes") is not None else None + }) + return _obj + +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_template import IoArgoprojWorkflowV1alpha1DAGTemplate +# TODO: Rewrite to not use raise_errors +IoArgoprojWorkflowV1alpha1Template.model_rebuild(raise_errors=False) + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template_ref.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template_ref.py new file mode 100644 index 000000000000..136713f93537 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_template_ref.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1TemplateRef(BaseModel): + """ + TemplateRef is a reference of template resource. + """ # noqa: E501 + cluster_scope: Optional[StrictBool] = Field(default=None, description="ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate).", alias="clusterScope") + name: Optional[StrictStr] = Field(default=None, description="Name is the resource name of the template.") + template: Optional[StrictStr] = Field(default=None, description="Template is the name of referred template in the resource.") + __properties: ClassVar[List[str]] = ["clusterScope", "name", "template"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TemplateRef from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TemplateRef from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "clusterScope": obj.get("clusterScope"), + "name": obj.get("name"), + "template": obj.get("template") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_transformation_step.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_transformation_step.py new file mode 100644 index 000000000000..4a98fc083bf9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_transformation_step.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1TransformationStep(BaseModel): + """ + IoArgoprojWorkflowV1alpha1TransformationStep + """ # noqa: E501 + expression: StrictStr = Field(description="Expression defines an expr expression to apply") + __properties: ClassVar[List[str]] = ["expression"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TransformationStep from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TransformationStep from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "expression": obj.get("expression") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_ttl_strategy.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_ttl_strategy.py new file mode 100644 index 000000000000..18d36827f5ec --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_ttl_strategy.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1TTLStrategy(BaseModel): + """ + TTLStrategy is the strategy for the time to live depending on if the workflow succeeded or failed + """ # noqa: E501 + seconds_after_completion: Optional[StrictInt] = Field(default=None, description="SecondsAfterCompletion is the number of seconds to live after completion", alias="secondsAfterCompletion") + seconds_after_failure: Optional[StrictInt] = Field(default=None, description="SecondsAfterFailure is the number of seconds to live after failure", alias="secondsAfterFailure") + seconds_after_success: Optional[StrictInt] = Field(default=None, description="SecondsAfterSuccess is the number of seconds to live after success", alias="secondsAfterSuccess") + __properties: ClassVar[List[str]] = ["secondsAfterCompletion", "secondsAfterFailure", "secondsAfterSuccess"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TTLStrategy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1TTLStrategy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "secondsAfterCompletion": obj.get("secondsAfterCompletion"), + "secondsAfterFailure": obj.get("secondsAfterFailure"), + "secondsAfterSuccess": obj.get("secondsAfterSuccess") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py new file mode 100644 index 000000000000..233eda49489c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_update_cron_workflow_request.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest + """ # noqa: E501 + cron_workflow: Optional[IoArgoprojWorkflowV1alpha1CronWorkflow] = Field(default=None, alias="cronWorkflow") + name: Optional[StrictStr] = Field(default=None, description="DEPRECATED: This field is ignored.") + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["cronWorkflow", "name", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of cron_workflow + if self.cron_workflow: + _dict['cronWorkflow'] = self.cron_workflow.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cronWorkflow": IoArgoprojWorkflowV1alpha1CronWorkflow.from_dict(obj["cronWorkflow"]) if obj.get("cronWorkflow") is not None else None, + "name": obj.get("name"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_user_container.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_user_container.py new file mode 100644 index 000000000000..4b3db23c8773 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_user_container.py @@ -0,0 +1,193 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.container_port import ContainerPort +from argo_workflows.models.env_from_source import EnvFromSource +from argo_workflows.models.env_var import EnvVar +from argo_workflows.models.lifecycle import Lifecycle +from argo_workflows.models.probe import Probe +from argo_workflows.models.resource_requirements import ResourceRequirements +from argo_workflows.models.security_context import SecurityContext +from argo_workflows.models.volume_device import VolumeDevice +from argo_workflows.models.volume_mount import VolumeMount +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1UserContainer(BaseModel): + """ + UserContainer is a container specified by a user. + """ # noqa: E501 + args: Optional[List[StrictStr]] = Field(default=None, description="Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell") + command: Optional[List[StrictStr]] = Field(default=None, description="Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. 
If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell") + env: Optional[List[EnvVar]] = Field(default=None, description="List of environment variables to set in the container. Cannot be updated.") + env_from: Optional[List[EnvFromSource]] = Field(default=None, description="List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated.", alias="envFrom") + image: Optional[StrictStr] = Field(default=None, description="Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.") + image_pull_policy: Optional[StrictStr] = Field(default=None, description="Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/containers/images#updating-images", alias="imagePullPolicy") + lifecycle: Optional[Lifecycle] = None + liveness_probe: Optional[Probe] = Field(default=None, alias="livenessProbe") + mirror_volume_mounts: Optional[StrictBool] = Field(default=None, description="MirrorVolumeMounts will mount the same volumes specified in the main container to the container (including artifacts), at the same mountPaths. This enables dind daemon to partially see the same filesystem as the main container in order to use features such as docker volume binding", alias="mirrorVolumeMounts") + name: StrictStr = Field(description="Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.") + ports: Optional[List[ContainerPort]] = Field(default=None, description="List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated.") + readiness_probe: Optional[Probe] = Field(default=None, alias="readinessProbe") + resources: Optional[ResourceRequirements] = None + security_context: Optional[SecurityContext] = Field(default=None, alias="securityContext") + startup_probe: Optional[Probe] = Field(default=None, alias="startupProbe") + stdin: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false.") + stdin_once: Optional[StrictBool] = Field(default=None, description="Whether the container runtime should close the stdin channel after it has been opened by a single attach. 
When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false", alias="stdinOnce") + termination_message_path: Optional[StrictStr] = Field(default=None, description="Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated.", alias="terminationMessagePath") + termination_message_policy: Optional[StrictStr] = Field(default=None, description="Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated.", alias="terminationMessagePolicy") + tty: Optional[StrictBool] = Field(default=None, description="Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. 
Default is false.") + volume_devices: Optional[List[VolumeDevice]] = Field(default=None, description="volumeDevices is the list of block devices to be used by the container.", alias="volumeDevices") + volume_mounts: Optional[List[VolumeMount]] = Field(default=None, description="Pod volumes to mount into the container's filesystem. Cannot be updated.", alias="volumeMounts") + working_dir: Optional[StrictStr] = Field(default=None, description="Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.", alias="workingDir") + __properties: ClassVar[List[str]] = ["args", "command", "env", "envFrom", "image", "imagePullPolicy", "lifecycle", "livenessProbe", "mirrorVolumeMounts", "name", "ports", "readinessProbe", "resources", "securityContext", "startupProbe", "stdin", "stdinOnce", "terminationMessagePath", "terminationMessagePolicy", "tty", "volumeDevices", "volumeMounts", "workingDir"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1UserContainer from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in env (list) + _items = [] + if self.env: + for _item in self.env: + if _item: + _items.append(_item.to_dict()) + _dict['env'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in env_from (list) + _items = [] + if self.env_from: + for _item in self.env_from: + if _item: + _items.append(_item.to_dict()) + _dict['envFrom'] = _items + # override the default output from pydantic by calling `to_dict()` of lifecycle + if self.lifecycle: + _dict['lifecycle'] = self.lifecycle.to_dict() + # override the default output from pydantic by calling `to_dict()` of liveness_probe + if self.liveness_probe: + _dict['livenessProbe'] = self.liveness_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in ports (list) + _items = [] + if self.ports: + for _item in self.ports: + if _item: + _items.append(_item.to_dict()) + _dict['ports'] = _items + # override the default output from pydantic by calling `to_dict()` of readiness_probe + if self.readiness_probe: + _dict['readinessProbe'] = self.readiness_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of resources + if self.resources: + _dict['resources'] = self.resources.to_dict() + # override the default output from pydantic by calling `to_dict()` of security_context + if self.security_context: + _dict['securityContext'] = self.security_context.to_dict() + # override the default output from pydantic by calling `to_dict()` of startup_probe + if 
self.startup_probe: + _dict['startupProbe'] = self.startup_probe.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in volume_devices (list) + _items = [] + if self.volume_devices: + for _item in self.volume_devices: + if _item: + _items.append(_item.to_dict()) + _dict['volumeDevices'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in volume_mounts (list) + _items = [] + if self.volume_mounts: + for _item in self.volume_mounts: + if _item: + _items.append(_item.to_dict()) + _dict['volumeMounts'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1UserContainer from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "args": obj.get("args"), + "command": obj.get("command"), + "env": [EnvVar.from_dict(_item) for _item in obj["env"]] if obj.get("env") is not None else None, + "envFrom": [EnvFromSource.from_dict(_item) for _item in obj["envFrom"]] if obj.get("envFrom") is not None else None, + "image": obj.get("image"), + "imagePullPolicy": obj.get("imagePullPolicy"), + "lifecycle": Lifecycle.from_dict(obj["lifecycle"]) if obj.get("lifecycle") is not None else None, + "livenessProbe": Probe.from_dict(obj["livenessProbe"]) if obj.get("livenessProbe") is not None else None, + "mirrorVolumeMounts": obj.get("mirrorVolumeMounts"), + "name": obj.get("name"), + "ports": [ContainerPort.from_dict(_item) for _item in obj["ports"]] if obj.get("ports") is not None else None, + "readinessProbe": Probe.from_dict(obj["readinessProbe"]) if obj.get("readinessProbe") is not None else None, + "resources": ResourceRequirements.from_dict(obj["resources"]) if obj.get("resources") is not None else None, + "securityContext": SecurityContext.from_dict(obj["securityContext"]) if obj.get("securityContext") is 
not None else None, + "startupProbe": Probe.from_dict(obj["startupProbe"]) if obj.get("startupProbe") is not None else None, + "stdin": obj.get("stdin"), + "stdinOnce": obj.get("stdinOnce"), + "terminationMessagePath": obj.get("terminationMessagePath"), + "terminationMessagePolicy": obj.get("terminationMessagePolicy"), + "tty": obj.get("tty"), + "volumeDevices": [VolumeDevice.from_dict(_item) for _item in obj["volumeDevices"]] if obj.get("volumeDevices") is not None else None, + "volumeMounts": [VolumeMount.from_dict(_item) for _item in obj["volumeMounts"]] if obj.get("volumeMounts") is not None else None, + "workingDir": obj.get("workingDir") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_value_from.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_value_from.py new file mode 100644 index 000000000000..c94a35bc7820 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_value_from.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1ValueFrom(BaseModel): + """ + ValueFrom describes a location in which to obtain the value to a parameter + """ # noqa: E501 + config_map_key_ref: Optional[ConfigMapKeySelector] = Field(default=None, alias="configMapKeyRef") + default: Optional[StrictStr] = Field(default=None, description="Default specifies a value to be used if retrieving the value from the specified source fails") + event: Optional[StrictStr] = Field(default=None, description="Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`") + expression: Optional[StrictStr] = Field(default=None, description="Expression, if defined, is evaluated to specify the value for the parameter") + jq_filter: Optional[StrictStr] = Field(default=None, description="JQFilter expression against the resource object in resource templates", alias="jqFilter") + json_path: Optional[StrictStr] = Field(default=None, description="JSONPath of a resource to retrieve an output parameter value from in resource templates", alias="jsonPath") + parameter: Optional[StrictStr] = Field(default=None, description="Parameter reference to a step or dag task in which to retrieve an output parameter value from (e.g. 
'{{steps.mystep.outputs.myparam}}')") + path: Optional[StrictStr] = Field(default=None, description="Path in the container to retrieve an output parameter value from in container templates") + supplied: Optional[Dict[str, Any]] = Field(default=None, description="SuppliedValueFrom is a placeholder for a value to be filled in directly, either through the CLI, API, etc.") + __properties: ClassVar[List[str]] = ["configMapKeyRef", "default", "event", "expression", "jqFilter", "jsonPath", "parameter", "path", "supplied"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ValueFrom from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config_map_key_ref + if self.config_map_key_ref: + _dict['configMapKeyRef'] = self.config_map_key_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1ValueFrom from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMapKeyRef": ConfigMapKeySelector.from_dict(obj["configMapKeyRef"]) if obj.get("configMapKeyRef") is not None else None, + "default": obj.get("default"), + "event": obj.get("event"), + "expression": obj.get("expression"), + "jqFilter": obj.get("jqFilter"), + "jsonPath": obj.get("jsonPath"), + "parameter": obj.get("parameter"), + "path": obj.get("path"), + "supplied": obj.get("supplied") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_version.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_version.py new file mode 100644 index 000000000000..adeefbe25ce5 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_version.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Version(BaseModel): + """ + IoArgoprojWorkflowV1alpha1Version + """ # noqa: E501 + build_date: StrictStr = Field(alias="buildDate") + compiler: StrictStr + git_commit: StrictStr = Field(alias="gitCommit") + git_tag: StrictStr = Field(alias="gitTag") + git_tree_state: StrictStr = Field(alias="gitTreeState") + go_version: StrictStr = Field(alias="goVersion") + platform: StrictStr + version: StrictStr + __properties: ClassVar[List[str]] = ["buildDate", "compiler", "gitCommit", "gitTag", "gitTreeState", "goVersion", "platform", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Version from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Version from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "buildDate": obj.get("buildDate"), + "compiler": obj.get("compiler"), + "gitCommit": obj.get("gitCommit"), + "gitTag": obj.get("gitTag"), + "gitTreeState": obj.get("gitTreeState"), + "goVersion": obj.get("goVersion"), + "platform": obj.get("platform"), + "version": obj.get("version") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_volume_claim_gc.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_volume_claim_gc.py new file mode 100644 index 000000000000..5a78d3742055 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_volume_claim_gc.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1VolumeClaimGC(BaseModel): + """ + VolumeClaimGC describes how to delete volumes from completed Workflows + """ # noqa: E501 + strategy: Optional[StrictStr] = Field(default=None, description="Strategy is the strategy to use. One of \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". Defaults to \"OnWorkflowSuccess\"") + __properties: ClassVar[List[str]] = ["strategy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1VolumeClaimGC from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1VolumeClaimGC from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "strategy": obj.get("strategy") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow.py new file mode 100644 index 000000000000..17d3889e937d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_status import IoArgoprojWorkflowV1alpha1WorkflowStatus +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1Workflow(BaseModel): + """ + Workflow is the definition of a workflow resource + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ObjectMeta + spec: IoArgoprojWorkflowV1alpha1WorkflowSpec + status: Optional[IoArgoprojWorkflowV1alpha1WorkflowStatus] = None + __properties: ClassVar[List[str]] = ["apiVersion", "kind", "metadata", "spec", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Workflow from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1Workflow from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "kind": obj.get("kind"), + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": IoArgoprojWorkflowV1alpha1WorkflowSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None, + "status": IoArgoprojWorkflowV1alpha1WorkflowStatus.from_dict(obj["status"]) if obj.get("status") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_create_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_create_request.py new file mode 100644 index 000000000000..f309e344db7b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_create_request.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowCreateRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowCreateRequest + """ # noqa: E501 + create_options: Optional[CreateOptions] = Field(default=None, alias="createOptions") + instance_id: Optional[StrictStr] = Field(default=None, description="This field is no longer used.", alias="instanceID") + namespace: Optional[StrictStr] = None + server_dry_run: Optional[StrictBool] = Field(default=None, alias="serverDryRun") + workflow: Optional[IoArgoprojWorkflowV1alpha1Workflow] = None + __properties: ClassVar[List[str]] = ["createOptions", "instanceID", "namespace", "serverDryRun", "workflow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of 
IoArgoprojWorkflowV1alpha1WorkflowCreateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of create_options + if self.create_options: + _dict['createOptions'] = self.create_options.to_dict() + # override the default output from pydantic by calling `to_dict()` of workflow + if self.workflow: + _dict['workflow'] = self.workflow.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowCreateRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "createOptions": CreateOptions.from_dict(obj["createOptions"]) if obj.get("createOptions") is not None else None, + "instanceID": obj.get("instanceID"), + "namespace": obj.get("namespace"), + "serverDryRun": obj.get("serverDryRun"), + "workflow": IoArgoprojWorkflowV1alpha1Workflow.from_dict(obj["workflow"]) if obj.get("workflow") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding.py new file mode 100644 index 000000000000..bb9bfe47d965 --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_spec import IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowEventBinding(BaseModel): + """ + WorkflowEventBinding is the definition of an event resource + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ObjectMeta + spec: IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec + __properties: ClassVar[List[str]] = ["apiVersion", "kind", "metadata", "spec"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBinding from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBinding from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "kind": obj.get("kind"), + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py new file mode 100644 index 000000000000..c863a30aaa14 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_list.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding import IoArgoprojWorkflowV1alpha1WorkflowEventBinding +from argo_workflows.models.list_meta import ListMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowEventBindingList(BaseModel): + """ + WorkflowEventBindingList is list of event resources + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + items: List[IoArgoprojWorkflowV1alpha1WorkflowEventBinding] + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ListMeta + __properties: ClassVar[List[str]] = ["apiVersion", "items", "kind", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "items": [IoArgoprojWorkflowV1alpha1WorkflowEventBinding.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "kind": obj.get("kind"), + "metadata": ListMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py new file mode 100644 index 000000000000..88e81e6db4a8 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_event import IoArgoprojWorkflowV1alpha1Event +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit import IoArgoprojWorkflowV1alpha1Submit +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec + """ # noqa: E501 + event: IoArgoprojWorkflowV1alpha1Event + submit: Optional[IoArgoprojWorkflowV1alpha1Submit] = None + __properties: ClassVar[List[str]] = ["event", "submit"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of event + if self.event: + _dict['event'] = self.event.to_dict() + # override the default output from pydantic by calling `to_dict()` of submit + if self.submit: + _dict['submit'] = self.submit.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "event": IoArgoprojWorkflowV1alpha1Event.from_dict(obj["event"]) if obj.get("event") is not None else None, + "submit": IoArgoprojWorkflowV1alpha1Submit.from_dict(obj["submit"]) if obj.get("submit") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py new file mode 100644 index 000000000000..c1e48e8d84c7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC(BaseModel): + """ + WorkflowLevelArtifactGC describes how to delete artifacts from completed Workflows - this spec is used on the Workflow level + """ # noqa: E501 + force_finalizer_removal: Optional[StrictBool] = Field(default=None, description="ForceFinalizerRemoval: if set to true, the finalizer will be removed in the case that Artifact GC fails", alias="forceFinalizerRemoval") + pod_metadata: Optional[IoArgoprojWorkflowV1alpha1Metadata] = Field(default=None, alias="podMetadata") + pod_spec_patch: Optional[StrictStr] = Field(default=None, description="PodSpecPatch holds strategic merge patch to apply against the artgc pod spec.", alias="podSpecPatch") + service_account_name: Optional[StrictStr] = Field(default=None, description="ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion", alias="serviceAccountName") + strategy: Optional[StrictStr] = Field(default=None, description="Strategy is the strategy to use.") + __properties: ClassVar[List[str]] = ["forceFinalizerRemoval", "podMetadata", "podSpecPatch", "serviceAccountName", "strategy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: 
use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pod_metadata + if self.pod_metadata: + _dict['podMetadata'] = self.pod_metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "forceFinalizerRemoval": obj.get("forceFinalizerRemoval"), + "podMetadata": IoArgoprojWorkflowV1alpha1Metadata.from_dict(obj["podMetadata"]) if obj.get("podMetadata") is not None else None, + "podSpecPatch": obj.get("podSpecPatch"), + "serviceAccountName": obj.get("serviceAccountName"), + "strategy": obj.get("strategy") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_lint_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_lint_request.py new file mode 100644 index 000000000000..4897552b88ad --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_lint_request.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowLintRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowLintRequest + """ # noqa: E501 + namespace: Optional[StrictStr] = None + workflow: Optional[IoArgoprojWorkflowV1alpha1Workflow] = None + __properties: ClassVar[List[str]] = ["namespace", "workflow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowLintRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary 
representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of workflow + if self.workflow: + _dict['workflow'] = self.workflow.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowLintRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "namespace": obj.get("namespace"), + "workflow": IoArgoprojWorkflowV1alpha1Workflow.from_dict(obj["workflow"]) if obj.get("workflow") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_list.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_list.py new file mode 100644 index 000000000000..8ba2d683e7f7 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_list.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.list_meta import ListMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowList(BaseModel): + """ + WorkflowList is list of Workflow resources + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + items: List[IoArgoprojWorkflowV1alpha1Workflow] + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ListMeta + __properties: ClassVar[List[str]] = ["apiVersion", "items", "kind", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "items": [IoArgoprojWorkflowV1alpha1Workflow.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "kind": obj.get("kind"), + "metadata": ListMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_metadata.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_metadata.py new file mode 100644 index 000000000000..49e5d5022c58 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_metadata.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_value_from import IoArgoprojWorkflowV1alpha1LabelValueFrom +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowMetadata(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowMetadata + """ # noqa: E501 + annotations: Optional[Dict[str, StrictStr]] = None + labels: Optional[Dict[str, StrictStr]] = None + labels_from: Optional[Dict[str, IoArgoprojWorkflowV1alpha1LabelValueFrom]] = Field(default=None, alias="labelsFrom") + __properties: ClassVar[List[str]] = ["annotations", "labels", "labelsFrom"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowMetadata from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in labels_from (dict) + _field_dict = {} + if self.labels_from: + for _key in self.labels_from: + if self.labels_from[_key]: + _field_dict[_key] = self.labels_from[_key].to_dict() + _dict['labelsFrom'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowMetadata from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "annotations": obj.get("annotations"), + "labels": obj.get("labels"), + "labelsFrom": dict( + (_k, IoArgoprojWorkflowV1alpha1LabelValueFrom.from_dict(_v)) + for _k, _v in obj["labelsFrom"].items() + ) + if obj.get("labelsFrom") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py new file mode 100644 index 000000000000..c1f2dd3ac326 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resubmit_request.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest + """ # noqa: E501 + memoized: Optional[StrictBool] = None + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + parameters: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["memoized", "name", "namespace", "parameters"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "memoized": obj.get("memoized"), + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "parameters": obj.get("parameters") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resume_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resume_request.py new file mode 100644 index 000000000000..26dd43d23369 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_resume_request.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowResumeRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowResumeRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + node_field_selector: Optional[StrictStr] = Field(default=None, alias="nodeFieldSelector") + __properties: ClassVar[List[str]] = ["name", "namespace", "nodeFieldSelector"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowResumeRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowResumeRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "nodeFieldSelector": obj.get("nodeFieldSelector") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_retry_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_retry_request.py new file mode 100644 index 000000000000..0446f01ef814 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_retry_request.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowRetryRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowRetryRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + node_field_selector: Optional[StrictStr] = Field(default=None, alias="nodeFieldSelector") + parameters: Optional[List[StrictStr]] = None + restart_successful: Optional[StrictBool] = Field(default=None, alias="restartSuccessful") + __properties: ClassVar[List[str]] = ["name", "namespace", "nodeFieldSelector", "parameters", "restartSuccessful"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowRetryRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowRetryRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "nodeFieldSelector": obj.get("nodeFieldSelector"), + "parameters": obj.get("parameters"), + "restartSuccessful": obj.get("restartSuccessful") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_set_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_set_request.py new file mode 100644 index 000000000000..85cd365c8820 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_set_request.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowSetRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowSetRequest + """ # noqa: E501 + message: Optional[StrictStr] = None + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + node_field_selector: Optional[StrictStr] = Field(default=None, alias="nodeFieldSelector") + output_parameters: Optional[StrictStr] = Field(default=None, alias="outputParameters") + phase: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["message", "name", "namespace", "nodeFieldSelector", "outputParameters", "phase"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSetRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSetRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "message": obj.get("message"), + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "nodeFieldSelector": obj.get("nodeFieldSelector"), + "outputParameters": obj.get("outputParameters"), + "phase": obj.get("phase") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_spec.py new file mode 100644 index 000000000000..4b05f594daf6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_spec.py @@ -0,0 +1,301 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.affinity import Affinity +from argo_workflows.models.host_alias import HostAlias +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef +from argo_workflows.models.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metrics import IoArgoprojWorkflowV1alpha1Metrics +from argo_workflows.models.io_argoproj_workflow_v1alpha1_pod_gc import IoArgoprojWorkflowV1alpha1PodGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template +from argo_workflows.models.io_argoproj_workflow_v1alpha1_ttl_strategy import IoArgoprojWorkflowV1alpha1TTLStrategy +from argo_workflows.models.io_argoproj_workflow_v1alpha1_volume_claim_gc import IoArgoprojWorkflowV1alpha1VolumeClaimGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc import IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_metadata import 
IoArgoprojWorkflowV1alpha1WorkflowMetadata +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef +from argo_workflows.models.io_k8s_api_policy_v1_pod_disruption_budget_spec import IoK8sApiPolicyV1PodDisruptionBudgetSpec +from argo_workflows.models.local_object_reference import LocalObjectReference +from argo_workflows.models.persistent_volume_claim import PersistentVolumeClaim +from argo_workflows.models.pod_dns_config import PodDNSConfig +from argo_workflows.models.pod_security_context import PodSecurityContext +from argo_workflows.models.toleration import Toleration +from argo_workflows.models.volume import Volume +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowSpec(BaseModel): + """ + WorkflowSpec is the specification of a Workflow. + """ # noqa: E501 + active_deadline_seconds: Optional[StrictInt] = Field(default=None, description="Optional duration in seconds relative to the workflow start time which the workflow is allowed to run before the controller terminates the io.argoproj.workflow.v1alpha1. A value of zero is used to terminate a Running workflow", alias="activeDeadlineSeconds") + affinity: Optional[Affinity] = None + archive_logs: Optional[StrictBool] = Field(default=None, description="ArchiveLogs indicates if the container logs should be archived", alias="archiveLogs") + arguments: Optional[IoArgoprojWorkflowV1alpha1Arguments] = None + artifact_gc: Optional[IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC] = Field(default=None, alias="artifactGC") + artifact_repository_ref: Optional[IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef] = Field(default=None, alias="artifactRepositoryRef") + automount_service_account_token: Optional[StrictBool] = Field(default=None, description="AutomountServiceAccountToken indicates whether a service account token should be automatically mounted in pods. 
ServiceAccountName of ExecutorConfig must be specified if this value is false.", alias="automountServiceAccountToken") + dns_config: Optional[PodDNSConfig] = Field(default=None, alias="dnsConfig") + dns_policy: Optional[StrictStr] = Field(default=None, description="Set DNS policy for workflow pods. Defaults to \"ClusterFirst\". Valid values are 'ClusterFirstWithHostNet', 'ClusterFirst', 'Default' or 'None'. DNS parameters given in DNSConfig will be merged with the policy selected with DNSPolicy. To have DNS options set along with hostNetwork, you have to specify DNS policy explicitly to 'ClusterFirstWithHostNet'.", alias="dnsPolicy") + entrypoint: Optional[StrictStr] = Field(default=None, description="Entrypoint is a template reference to the starting point of the io.argoproj.workflow.v1alpha1.") + executor: Optional[IoArgoprojWorkflowV1alpha1ExecutorConfig] = None + hooks: Optional[Dict[str, IoArgoprojWorkflowV1alpha1LifecycleHook]] = Field(default=None, description="Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step") + host_aliases: Optional[List[HostAlias]] = Field(default=None, alias="hostAliases") + host_network: Optional[StrictBool] = Field(default=None, description="Host networking requested for this workflow pod. Default to false.", alias="hostNetwork") + image_pull_secrets: Optional[List[LocalObjectReference]] = Field(default=None, description="ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images in pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets can be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet. 
More info: https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod", alias="imagePullSecrets") + metrics: Optional[IoArgoprojWorkflowV1alpha1Metrics] = None + node_selector: Optional[Dict[str, StrictStr]] = Field(default=None, description="NodeSelector is a selector which will result in all pods of the workflow to be scheduled on the selected node(s). This is able to be overridden by a nodeSelector specified in the template.", alias="nodeSelector") + on_exit: Optional[StrictStr] = Field(default=None, description="OnExit is a template reference which is invoked at the end of the workflow, irrespective of the success, failure, or error of the primary io.argoproj.workflow.v1alpha1.", alias="onExit") + parallelism: Optional[StrictInt] = Field(default=None, description="Parallelism limits the max total parallel pods that can execute at the same time in a workflow") + pod_disruption_budget: Optional[IoK8sApiPolicyV1PodDisruptionBudgetSpec] = Field(default=None, alias="podDisruptionBudget") + pod_gc: Optional[IoArgoprojWorkflowV1alpha1PodGC] = Field(default=None, alias="podGC") + pod_metadata: Optional[IoArgoprojWorkflowV1alpha1Metadata] = Field(default=None, alias="podMetadata") + pod_priority: Optional[StrictInt] = Field(default=None, description="Priority to apply to workflow pods. DEPRECATED: Use PodPriorityClassName instead.", alias="podPriority") + pod_priority_class_name: Optional[StrictStr] = Field(default=None, description="PriorityClassName to apply to workflow pods.", alias="podPriorityClassName") + pod_spec_patch: Optional[StrictStr] = Field(default=None, description="PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits).", alias="podSpecPatch") + priority: Optional[StrictInt] = Field(default=None, description="Priority is used if controller is configured to process limited number of workflows in parallel. 
Workflows with higher priority are processed first.") + retry_strategy: Optional[IoArgoprojWorkflowV1alpha1RetryStrategy] = Field(default=None, alias="retryStrategy") + scheduler_name: Optional[StrictStr] = Field(default=None, description="Set scheduler name for all pods. Will be overridden if container/script template's scheduler name is set. Default scheduler will be used if neither specified.", alias="schedulerName") + security_context: Optional[PodSecurityContext] = Field(default=None, alias="securityContext") + service_account_name: Optional[StrictStr] = Field(default=None, description="ServiceAccountName is the name of the ServiceAccount to run all pods of the workflow as.", alias="serviceAccountName") + shutdown: Optional[StrictStr] = Field(default=None, description="Shutdown will shutdown the workflow according to its ShutdownStrategy") + suspend: Optional[StrictBool] = Field(default=None, description="Suspend will suspend the workflow and prevent execution of any future steps in the workflow") + synchronization: Optional[IoArgoprojWorkflowV1alpha1Synchronization] = None + template_defaults: Optional[IoArgoprojWorkflowV1alpha1Template] = Field(default=None, alias="templateDefaults") + templates: Optional[List[IoArgoprojWorkflowV1alpha1Template]] = Field(default=None, description="Templates is a list of workflow templates used in a workflow") + tolerations: Optional[List[Toleration]] = Field(default=None, description="Tolerations to apply to workflow pods.") + ttl_strategy: Optional[IoArgoprojWorkflowV1alpha1TTLStrategy] = Field(default=None, alias="ttlStrategy") + volume_claim_gc: Optional[IoArgoprojWorkflowV1alpha1VolumeClaimGC] = Field(default=None, alias="volumeClaimGC") + volume_claim_templates: Optional[List[PersistentVolumeClaim]] = Field(default=None, description="VolumeClaimTemplates is a list of claims that containers are allowed to reference. 
The Workflow controller will create the claims at the beginning of the workflow and delete the claims upon completion of the workflow", alias="volumeClaimTemplates") + volumes: Optional[List[Volume]] = Field(default=None, description="Volumes is a list of volumes that can be mounted by containers in a io.argoproj.workflow.v1alpha1.") + workflow_metadata: Optional[IoArgoprojWorkflowV1alpha1WorkflowMetadata] = Field(default=None, alias="workflowMetadata") + workflow_template_ref: Optional[IoArgoprojWorkflowV1alpha1WorkflowTemplateRef] = Field(default=None, alias="workflowTemplateRef") + __properties: ClassVar[List[str]] = ["activeDeadlineSeconds", "affinity", "archiveLogs", "arguments", "artifactGC", "artifactRepositoryRef", "automountServiceAccountToken", "dnsConfig", "dnsPolicy", "entrypoint", "executor", "hooks", "hostAliases", "hostNetwork", "imagePullSecrets", "metrics", "nodeSelector", "onExit", "parallelism", "podDisruptionBudget", "podGC", "podMetadata", "podPriority", "podPriorityClassName", "podSpecPatch", "priority", "retryStrategy", "schedulerName", "securityContext", "serviceAccountName", "shutdown", "suspend", "synchronization", "templateDefaults", "templates", "tolerations", "ttlStrategy", "volumeClaimGC", "volumeClaimTemplates", "volumes", "workflowMetadata", "workflowTemplateRef"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSpec from a JSON string""" + return 
cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of affinity + if self.affinity: + _dict['affinity'] = self.affinity.to_dict() + # override the default output from pydantic by calling `to_dict()` of arguments + if self.arguments: + _dict['arguments'] = self.arguments.to_dict() + # override the default output from pydantic by calling `to_dict()` of artifact_gc + if self.artifact_gc: + _dict['artifactGC'] = self.artifact_gc.to_dict() + # override the default output from pydantic by calling `to_dict()` of artifact_repository_ref + if self.artifact_repository_ref: + _dict['artifactRepositoryRef'] = self.artifact_repository_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of dns_config + if self.dns_config: + _dict['dnsConfig'] = self.dns_config.to_dict() + # override the default output from pydantic by calling `to_dict()` of executor + if self.executor: + _dict['executor'] = self.executor.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in hooks (dict) + _field_dict = {} + if self.hooks: + for _key in self.hooks: + if self.hooks[_key]: + _field_dict[_key] = self.hooks[_key].to_dict() + _dict['hooks'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each item in host_aliases (list) + _items = [] + if self.host_aliases: + for _item in self.host_aliases: + if _item: + _items.append(_item.to_dict()) + 
_dict['hostAliases'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in image_pull_secrets (list) + _items = [] + if self.image_pull_secrets: + for _item in self.image_pull_secrets: + if _item: + _items.append(_item.to_dict()) + _dict['imagePullSecrets'] = _items + # override the default output from pydantic by calling `to_dict()` of metrics + if self.metrics: + _dict['metrics'] = self.metrics.to_dict() + # override the default output from pydantic by calling `to_dict()` of pod_disruption_budget + if self.pod_disruption_budget: + _dict['podDisruptionBudget'] = self.pod_disruption_budget.to_dict() + # override the default output from pydantic by calling `to_dict()` of pod_gc + if self.pod_gc: + _dict['podGC'] = self.pod_gc.to_dict() + # override the default output from pydantic by calling `to_dict()` of pod_metadata + if self.pod_metadata: + _dict['podMetadata'] = self.pod_metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of retry_strategy + if self.retry_strategy: + _dict['retryStrategy'] = self.retry_strategy.to_dict() + # override the default output from pydantic by calling `to_dict()` of security_context + if self.security_context: + _dict['securityContext'] = self.security_context.to_dict() + # override the default output from pydantic by calling `to_dict()` of synchronization + if self.synchronization: + _dict['synchronization'] = self.synchronization.to_dict() + # override the default output from pydantic by calling `to_dict()` of template_defaults + if self.template_defaults: + _dict['templateDefaults'] = self.template_defaults.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in templates (list) + _items = [] + if self.templates: + for _item in self.templates: + if _item: + _items.append(_item.to_dict()) + _dict['templates'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in tolerations (list) 
+ _items = [] + if self.tolerations: + for _item in self.tolerations: + if _item: + _items.append(_item.to_dict()) + _dict['tolerations'] = _items + # override the default output from pydantic by calling `to_dict()` of ttl_strategy + if self.ttl_strategy: + _dict['ttlStrategy'] = self.ttl_strategy.to_dict() + # override the default output from pydantic by calling `to_dict()` of volume_claim_gc + if self.volume_claim_gc: + _dict['volumeClaimGC'] = self.volume_claim_gc.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in volume_claim_templates (list) + _items = [] + if self.volume_claim_templates: + for _item in self.volume_claim_templates: + if _item: + _items.append(_item.to_dict()) + _dict['volumeClaimTemplates'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in volumes (list) + _items = [] + if self.volumes: + for _item in self.volumes: + if _item: + _items.append(_item.to_dict()) + _dict['volumes'] = _items + # override the default output from pydantic by calling `to_dict()` of workflow_metadata + if self.workflow_metadata: + _dict['workflowMetadata'] = self.workflow_metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of workflow_template_ref + if self.workflow_template_ref: + _dict['workflowTemplateRef'] = self.workflow_template_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "activeDeadlineSeconds": obj.get("activeDeadlineSeconds"), + "affinity": Affinity.from_dict(obj["affinity"]) if obj.get("affinity") is not None else None, + "archiveLogs": obj.get("archiveLogs"), + "arguments": IoArgoprojWorkflowV1alpha1Arguments.from_dict(obj["arguments"]) if 
obj.get("arguments") is not None else None, + "artifactGC": IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC.from_dict(obj["artifactGC"]) if obj.get("artifactGC") is not None else None, + "artifactRepositoryRef": IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.from_dict(obj["artifactRepositoryRef"]) if obj.get("artifactRepositoryRef") is not None else None, + "automountServiceAccountToken": obj.get("automountServiceAccountToken"), + "dnsConfig": PodDNSConfig.from_dict(obj["dnsConfig"]) if obj.get("dnsConfig") is not None else None, + "dnsPolicy": obj.get("dnsPolicy"), + "entrypoint": obj.get("entrypoint"), + "executor": IoArgoprojWorkflowV1alpha1ExecutorConfig.from_dict(obj["executor"]) if obj.get("executor") is not None else None, + "hooks": dict( + (_k, IoArgoprojWorkflowV1alpha1LifecycleHook.from_dict(_v)) + for _k, _v in obj["hooks"].items() + ) + if obj.get("hooks") is not None + else None, + "hostAliases": [HostAlias.from_dict(_item) for _item in obj["hostAliases"]] if obj.get("hostAliases") is not None else None, + "hostNetwork": obj.get("hostNetwork"), + "imagePullSecrets": [LocalObjectReference.from_dict(_item) for _item in obj["imagePullSecrets"]] if obj.get("imagePullSecrets") is not None else None, + "metrics": IoArgoprojWorkflowV1alpha1Metrics.from_dict(obj["metrics"]) if obj.get("metrics") is not None else None, + "nodeSelector": obj.get("nodeSelector"), + "onExit": obj.get("onExit"), + "parallelism": obj.get("parallelism"), + "podDisruptionBudget": IoK8sApiPolicyV1PodDisruptionBudgetSpec.from_dict(obj["podDisruptionBudget"]) if obj.get("podDisruptionBudget") is not None else None, + "podGC": IoArgoprojWorkflowV1alpha1PodGC.from_dict(obj["podGC"]) if obj.get("podGC") is not None else None, + "podMetadata": IoArgoprojWorkflowV1alpha1Metadata.from_dict(obj["podMetadata"]) if obj.get("podMetadata") is not None else None, + "podPriority": obj.get("podPriority"), + "podPriorityClassName": obj.get("podPriorityClassName"), + "podSpecPatch": 
obj.get("podSpecPatch"), + "priority": obj.get("priority"), + "retryStrategy": IoArgoprojWorkflowV1alpha1RetryStrategy.from_dict(obj["retryStrategy"]) if obj.get("retryStrategy") is not None else None, + "schedulerName": obj.get("schedulerName"), + "securityContext": PodSecurityContext.from_dict(obj["securityContext"]) if obj.get("securityContext") is not None else None, + "serviceAccountName": obj.get("serviceAccountName"), + "shutdown": obj.get("shutdown"), + "suspend": obj.get("suspend"), + "synchronization": IoArgoprojWorkflowV1alpha1Synchronization.from_dict(obj["synchronization"]) if obj.get("synchronization") is not None else None, + "templateDefaults": IoArgoprojWorkflowV1alpha1Template.from_dict(obj["templateDefaults"]) if obj.get("templateDefaults") is not None else None, + "templates": [IoArgoprojWorkflowV1alpha1Template.from_dict(_item) for _item in obj["templates"]] if obj.get("templates") is not None else None, + "tolerations": [Toleration.from_dict(_item) for _item in obj["tolerations"]] if obj.get("tolerations") is not None else None, + "ttlStrategy": IoArgoprojWorkflowV1alpha1TTLStrategy.from_dict(obj["ttlStrategy"]) if obj.get("ttlStrategy") is not None else None, + "volumeClaimGC": IoArgoprojWorkflowV1alpha1VolumeClaimGC.from_dict(obj["volumeClaimGC"]) if obj.get("volumeClaimGC") is not None else None, + "volumeClaimTemplates": [PersistentVolumeClaim.from_dict(_item) for _item in obj["volumeClaimTemplates"]] if obj.get("volumeClaimTemplates") is not None else None, + "volumes": [Volume.from_dict(_item) for _item in obj["volumes"]] if obj.get("volumes") is not None else None, + "workflowMetadata": IoArgoprojWorkflowV1alpha1WorkflowMetadata.from_dict(obj["workflowMetadata"]) if obj.get("workflowMetadata") is not None else None, + "workflowTemplateRef": IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.from_dict(obj["workflowTemplateRef"]) if obj.get("workflowTemplateRef") is not None else None + }) + return _obj + + diff --git 
a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_status.py
new file mode 100644
index 000000000000..a509fed1fe12
--- /dev/null
+++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_status.py
@@ -0,0 +1,186 @@
# coding: utf-8

"""
    Argo Workflows API

    Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/

    The version of the OpenAPI document: VERSION
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
""" # noqa: E501


from __future__ import annotations
import pprint
import re  # noqa: F401
import json

from datetime import datetime
from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from argo_workflows.models.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus
from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus
from argo_workflows.models.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition
from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus
from argo_workflows.models.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs
from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization_status import IoArgoprojWorkflowV1alpha1SynchronizationStatus
from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template
from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec
from argo_workflows.models.volume import Volume
from typing import Optional, Set
from typing_extensions import Self

class IoArgoprojWorkflowV1alpha1WorkflowStatus(BaseModel):
    """
    WorkflowStatus contains overall status information about a workflow
    """ # noqa: E501
    # Every attribute maps to one JSON property of the API object; the wire name
    # is the `alias=` value (camelCase), the Python name is snake_case. All
    # fields are optional and default to None.
    artifact_gc_status: Optional[IoArgoprojWorkflowV1alpha1ArtGCStatus] = Field(default=None, alias="artifactGCStatus")
    artifact_repository_ref: Optional[IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus] = Field(default=None, alias="artifactRepositoryRef")
    compressed_nodes: Optional[StrictStr] = Field(default=None, description="Compressed and base64 decoded Nodes map", alias="compressedNodes")
    conditions: Optional[List[IoArgoprojWorkflowV1alpha1Condition]] = Field(default=None, description="Conditions is a list of conditions the Workflow may have")
    estimated_duration: Optional[StrictInt] = Field(default=None, description="EstimatedDuration in seconds.", alias="estimatedDuration")
    finished_at: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="finishedAt")
    message: Optional[StrictStr] = Field(default=None, description="A human readable message indicating details about why the workflow is in this condition.")
    nodes: Optional[Dict[str, IoArgoprojWorkflowV1alpha1NodeStatus]] = Field(default=None, description="Nodes is a mapping between a node ID and the node's status.")
    offload_node_status_version: Optional[StrictStr] = Field(default=None, description="Whether on not node status has been offloaded to a database. If exists, then Nodes and CompressedNodes will be empty. This will actually be populated with a hash of the offloaded data.", alias="offloadNodeStatusVersion")
    outputs: Optional[IoArgoprojWorkflowV1alpha1Outputs] = None
    persistent_volume_claims: Optional[List[Volume]] = Field(default=None, description="PersistentVolumeClaims tracks all PVCs that were created as part of the io.argoproj.workflow.v1alpha1. The contents of this list are drained at the end of the workflow.", alias="persistentVolumeClaims")
    phase: Optional[StrictStr] = Field(default=None, description="Phase a simple, high-level summary of where the workflow is in its lifecycle. Will be \"\" (Unknown), \"Pending\", or \"Running\" before the workflow is completed, and \"Succeeded\", \"Failed\" or \"Error\" once the workflow has completed.")
    progress: Optional[StrictStr] = Field(default=None, description="Progress to completion")
    resources_duration: Optional[Dict[str, StrictInt]] = Field(default=None, description="ResourcesDuration is the total for the workflow", alias="resourcesDuration")
    started_at: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="startedAt")
    stored_templates: Optional[Dict[str, IoArgoprojWorkflowV1alpha1Template]] = Field(default=None, description="StoredTemplates is a mapping between a template ref and the node's status.", alias="storedTemplates")
    stored_workflow_template_spec: Optional[IoArgoprojWorkflowV1alpha1WorkflowSpec] = Field(default=None, alias="storedWorkflowTemplateSpec")
    synchronization: Optional[IoArgoprojWorkflowV1alpha1SynchronizationStatus] = None
    task_results_completion_status: Optional[Dict[str, StrictBool]] = Field(default=None, description="TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.", alias="taskResultsCompletionStatus")
    # JSON aliases of all declared fields, used by the generated (de)serialization helpers.
    __properties: ClassVar[List[str]] = ["artifactGCStatus", "artifactRepositoryRef", "compressedNodes", "conditions", "estimatedDuration", "finishedAt", "message", "nodes", "offloadNodeStatusVersion", "outputs", "persistentVolumeClaims", "phase", "progress", "resourcesDuration", "startedAt", "storedTemplates", "storedWorkflowTemplateSpec", "synchronization", "taskResultsCompletionStatus"]

    # Accept both snake_case names and camelCase aliases on input; re-validate
    # on attribute assignment; disable pydantic's protected "model_" namespace.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowStatus from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of artifact_gc_status
        if self.artifact_gc_status:
            _dict['artifactGCStatus'] = self.artifact_gc_status.to_dict()
        # override the default output from pydantic by calling `to_dict()` of artifact_repository_ref
        if self.artifact_repository_ref:
            _dict['artifactRepositoryRef'] = self.artifact_repository_ref.to_dict()
        # override the default output from pydantic by calling `to_dict()` of each item in conditions (list)
        _items = []
        if self.conditions:
            for _item in self.conditions:
                if _item:
                    _items.append(_item.to_dict())
            _dict['conditions'] = _items
        # override the default output from pydantic by calling `to_dict()` of each value in nodes (dict)
        _field_dict = {}
        if self.nodes:
            for _key in self.nodes:
                if self.nodes[_key]:
                    _field_dict[_key] = self.nodes[_key].to_dict()
            _dict['nodes'] = _field_dict
        # override the default output from pydantic by calling `to_dict()` of outputs
        if self.outputs:
            _dict['outputs'] = self.outputs.to_dict()
        # override the default output from pydantic by calling `to_dict()` of each item in persistent_volume_claims (list)
        _items = []
        if self.persistent_volume_claims:
            for _item in self.persistent_volume_claims:
                if _item:
                    _items.append(_item.to_dict())
            _dict['persistentVolumeClaims'] = _items
        # override the default output from pydantic by calling `to_dict()` of each value in stored_templates (dict)
        _field_dict = {}
        if self.stored_templates:
            for _key in self.stored_templates:
                if self.stored_templates[_key]:
                    _field_dict[_key] = self.stored_templates[_key].to_dict()
            _dict['storedTemplates'] = _field_dict
        # override the default output from pydantic by calling `to_dict()` of stored_workflow_template_spec
        if self.stored_workflow_template_spec:
            _dict['storedWorkflowTemplateSpec'] = self.stored_workflow_template_spec.to_dict()
        # override the default output from pydantic by calling `to_dict()` of synchronization
        if self.synchronization:
            _dict['synchronization'] = self.synchronization.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowStatus from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "artifactGCStatus": IoArgoprojWorkflowV1alpha1ArtGCStatus.from_dict(obj["artifactGCStatus"]) if obj.get("artifactGCStatus") is not None else None,
            "artifactRepositoryRef": IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.from_dict(obj["artifactRepositoryRef"]) if obj.get("artifactRepositoryRef") is not None else None,
            "compressedNodes": obj.get("compressedNodes"),
            "conditions": [IoArgoprojWorkflowV1alpha1Condition.from_dict(_item) for _item in obj["conditions"]] if obj.get("conditions") is not None else None,
            "estimatedDuration": obj.get("estimatedDuration"),
            "finishedAt": obj.get("finishedAt"),
            "message": obj.get("message"),
            "nodes": dict(
                (_k, IoArgoprojWorkflowV1alpha1NodeStatus.from_dict(_v))
                for _k, _v in obj["nodes"].items()
            )
            if obj.get("nodes") is not None
            else None,
            "offloadNodeStatusVersion": obj.get("offloadNodeStatusVersion"),
            "outputs": IoArgoprojWorkflowV1alpha1Outputs.from_dict(obj["outputs"]) if obj.get("outputs") is not None else None,
            "persistentVolumeClaims": [Volume.from_dict(_item) for _item in obj["persistentVolumeClaims"]] if obj.get("persistentVolumeClaims") is not None else None,
            "phase": obj.get("phase"),
            "progress": obj.get("progress"),
            "resourcesDuration": obj.get("resourcesDuration"),
            "startedAt": obj.get("startedAt"),
            "storedTemplates": dict(
                (_k, IoArgoprojWorkflowV1alpha1Template.from_dict(_v))
                for _k, _v in obj["storedTemplates"].items()
            )
            if obj.get("storedTemplates") is not None
            else None,
            "storedWorkflowTemplateSpec": IoArgoprojWorkflowV1alpha1WorkflowSpec.from_dict(obj["storedWorkflowTemplateSpec"]) if obj.get("storedWorkflowTemplateSpec") is not None else None,
            "synchronization": IoArgoprojWorkflowV1alpha1SynchronizationStatus.from_dict(obj["synchronization"]) if obj.get("synchronization") is not None else None,
            "taskResultsCompletionStatus": obj.get("taskResultsCompletionStatus")
        })
        return _obj


diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_step.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_step.py
new file mode 100644
index 000000000000..720d8842a0df
--- /dev/null
+++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_step.py
@@ -0,0 +1,142 @@
# coding: utf-8

"""
    Argo Workflows API

    Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/

    The version of the OpenAPI document: VERSION
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments +from argo_workflows.models.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook +from argo_workflows.models.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowStep(BaseModel): + """ + WorkflowStep is a reference to a template to execute in a series of step + """ # noqa: E501 + arguments: Optional[IoArgoprojWorkflowV1alpha1Arguments] = None + continue_on: Optional[IoArgoprojWorkflowV1alpha1ContinueOn] = Field(default=None, alias="continueOn") + hooks: Optional[Dict[str, IoArgoprojWorkflowV1alpha1LifecycleHook]] = Field(default=None, description="Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step") + inline: Optional[IoArgoprojWorkflowV1alpha1Template] = None + name: Optional[StrictStr] = Field(default=None, description="Name of the step") + on_exit: Optional[StrictStr] = Field(default=None, description="OnExit is a template reference which is invoked at the end of the template, irrespective of the success, failure, or error of the primary template. 
DEPRECATED: Use Hooks[exit].Template instead.", alias="onExit") + template: Optional[StrictStr] = Field(default=None, description="Template is the name of the template to execute as the step") + template_ref: Optional[IoArgoprojWorkflowV1alpha1TemplateRef] = Field(default=None, alias="templateRef") + when: Optional[StrictStr] = Field(default=None, description="When is an expression in which the step should conditionally execute") + with_items: Optional[List[Dict[str, Any]]] = Field(default=None, description="WithItems expands a step into multiple parallel steps from the items in the list", alias="withItems") + with_param: Optional[StrictStr] = Field(default=None, description="WithParam expands a step into multiple parallel steps from the value in the parameter, which is expected to be a JSON list.", alias="withParam") + with_sequence: Optional[IoArgoprojWorkflowV1alpha1Sequence] = Field(default=None, alias="withSequence") + __properties: ClassVar[List[str]] = ["arguments", "continueOn", "hooks", "inline", "name", "onExit", "template", "templateRef", "when", "withItems", "withParam", "withSequence"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowStep from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of arguments + if self.arguments: + _dict['arguments'] = self.arguments.to_dict() + # override the default output from pydantic by calling `to_dict()` of continue_on + if self.continue_on: + _dict['continueOn'] = self.continue_on.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in hooks (dict) + _field_dict = {} + if self.hooks: + for _key in self.hooks: + if self.hooks[_key]: + _field_dict[_key] = self.hooks[_key].to_dict() + _dict['hooks'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of inline + if self.inline: + _dict['inline'] = self.inline.to_dict() + # override the default output from pydantic by calling `to_dict()` of template_ref + if self.template_ref: + _dict['templateRef'] = self.template_ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of with_sequence + if self.with_sequence: + _dict['withSequence'] = self.with_sequence.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowStep from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "arguments": IoArgoprojWorkflowV1alpha1Arguments.from_dict(obj["arguments"]) if obj.get("arguments") is not None else None, + "continueOn": IoArgoprojWorkflowV1alpha1ContinueOn.from_dict(obj["continueOn"]) if obj.get("continueOn") is not None 
else None, + "hooks": dict( + (_k, IoArgoprojWorkflowV1alpha1LifecycleHook.from_dict(_v)) + for _k, _v in obj["hooks"].items() + ) + if obj.get("hooks") is not None + else None, + "inline": IoArgoprojWorkflowV1alpha1Template.from_dict(obj["inline"]) if obj.get("inline") is not None else None, + "name": obj.get("name"), + "onExit": obj.get("onExit"), + "template": obj.get("template"), + "templateRef": IoArgoprojWorkflowV1alpha1TemplateRef.from_dict(obj["templateRef"]) if obj.get("templateRef") is not None else None, + "when": obj.get("when"), + "withItems": obj.get("withItems"), + "withParam": obj.get("withParam"), + "withSequence": IoArgoprojWorkflowV1alpha1Sequence.from_dict(obj["withSequence"]) if obj.get("withSequence") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_stop_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_stop_request.py new file mode 100644 index 000000000000..6c2fdbbd33b8 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_stop_request.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowStopRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowStopRequest + """ # noqa: E501 + message: Optional[StrictStr] = None + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + node_field_selector: Optional[StrictStr] = Field(default=None, alias="nodeFieldSelector") + __properties: ClassVar[List[str]] = ["message", "name", "namespace", "nodeFieldSelector"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowStopRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowStopRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "message": obj.get("message"), + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "nodeFieldSelector": obj.get("nodeFieldSelector") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_submit_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_submit_request.py new file mode 100644 index 000000000000..d3e962237c3d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_submit_request.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit_opts import IoArgoprojWorkflowV1alpha1SubmitOpts +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest + """ # noqa: E501 + namespace: Optional[StrictStr] = None + resource_kind: Optional[StrictStr] = Field(default=None, alias="resourceKind") + resource_name: Optional[StrictStr] = Field(default=None, alias="resourceName") + submit_options: Optional[IoArgoprojWorkflowV1alpha1SubmitOpts] = Field(default=None, alias="submitOptions") + __properties: ClassVar[List[str]] = ["namespace", "resourceKind", "resourceName", "submitOptions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of submit_options + if self.submit_options: + _dict['submitOptions'] = self.submit_options.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "namespace": obj.get("namespace"), + "resourceKind": obj.get("resourceKind"), + "resourceName": obj.get("resourceName"), + "submitOptions": IoArgoprojWorkflowV1alpha1SubmitOpts.from_dict(obj["submitOptions"]) if obj.get("submitOptions") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py new file mode 100644 index 000000000000..e4e4e1604cc9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_suspend_request.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py new file mode 100644 index 000000000000..4366f0449a5d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_spec.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec + """ # noqa: E501 + tasks: Optional[Dict[str, IoArgoprojWorkflowV1alpha1Template]] = None + __properties: ClassVar[List[str]] = ["tasks"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in tasks (dict) + _field_dict = {} + if self.tasks: + for _key in self.tasks: + if self.tasks[_key]: + _field_dict[_key] = self.tasks[_key].to_dict() + _dict['tasks'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "tasks": dict( + (_k, IoArgoprojWorkflowV1alpha1Template.from_dict(_v)) + for _k, _v in obj["tasks"].items() + ) + if obj.get("tasks") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py new file mode 100644 index 000000000000..2f500db4bdff --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_task_set_status.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_result import IoArgoprojWorkflowV1alpha1NodeResult +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus + """ # noqa: E501 + nodes: Optional[Dict[str, IoArgoprojWorkflowV1alpha1NodeResult]] = None + __properties: ClassVar[List[str]] = ["nodes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in nodes (dict) + _field_dict = {} + if self.nodes: + for _key in self.nodes: + if self.nodes[_key]: + _field_dict[_key] = self.nodes[_key].to_dict() + _dict['nodes'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodes": dict( + (_k, IoArgoprojWorkflowV1alpha1NodeResult.from_dict(_v)) + for _k, _v in obj["nodes"].items() + ) + if obj.get("nodes") is not None + else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template.py new file mode 100644 index 000000000000..90048041206f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec +from argo_workflows.models.object_meta import ObjectMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTemplate(BaseModel): + """ + WorkflowTemplate is the definition of a workflow template resource + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ObjectMeta + spec: IoArgoprojWorkflowV1alpha1WorkflowSpec + __properties: ClassVar[List[str]] = ["apiVersion", "kind", "metadata", "spec"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "kind": obj.get("kind"), + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": IoArgoprojWorkflowV1alpha1WorkflowSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py new file mode 100644 index 000000000000..c3d3b69658e1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_create_request.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest + """ # noqa: E501 + create_options: Optional[CreateOptions] = Field(default=None, alias="createOptions") + namespace: Optional[StrictStr] = None + template: Optional[IoArgoprojWorkflowV1alpha1WorkflowTemplate] = None + __properties: ClassVar[List[str]] = ["createOptions", "namespace", "template"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of create_options + if self.create_options: + _dict['createOptions'] = self.create_options.to_dict() + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "createOptions": CreateOptions.from_dict(obj["createOptions"]) if obj.get("createOptions") is not None else None, + "namespace": obj.get("namespace"), + "template": IoArgoprojWorkflowV1alpha1WorkflowTemplate.from_dict(obj["template"]) if obj.get("template") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py new file mode 100644 index 000000000000..097339c7ea5a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_lint_request.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.create_options import CreateOptions +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest + """ # noqa: E501 + create_options: Optional[CreateOptions] = Field(default=None, alias="createOptions") + namespace: Optional[StrictStr] = None + template: Optional[IoArgoprojWorkflowV1alpha1WorkflowTemplate] = None + __properties: ClassVar[List[str]] = ["createOptions", "namespace", "template"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of create_options + if self.create_options: + _dict['createOptions'] = self.create_options.to_dict() + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "createOptions": CreateOptions.from_dict(obj["createOptions"]) if obj.get("createOptions") is not None else None, + "namespace": obj.get("namespace"), + "template": IoArgoprojWorkflowV1alpha1WorkflowTemplate.from_dict(obj["template"]) if obj.get("template") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_list.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_list.py new file mode 100644 index 000000000000..9abecc7565f0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_list.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.list_meta import ListMeta +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTemplateList(BaseModel): + """ + WorkflowTemplateList is list of WorkflowTemplate resources + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + items: List[IoArgoprojWorkflowV1alpha1WorkflowTemplate] + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: ListMeta + __properties: ClassVar[List[str]] = ["apiVersion", "items", "kind", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "items": [IoArgoprojWorkflowV1alpha1WorkflowTemplate.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "kind": obj.get("kind"), + "metadata": ListMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_ref.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_ref.py new file mode 100644 index 000000000000..9b8cadb19b92 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_ref.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTemplateRef(BaseModel): + """ + WorkflowTemplateRef is a reference to a WorkflowTemplate resource. + """ # noqa: E501 + cluster_scope: Optional[StrictBool] = Field(default=None, description="ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate).", alias="clusterScope") + name: Optional[StrictStr] = Field(default=None, description="Name is the resource name of the workflow template.") + __properties: ClassVar[List[str]] = ["clusterScope", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateRef from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateRef from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "clusterScope": obj.get("clusterScope"), + "name": obj.get("name") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py new file mode 100644 index 000000000000..9d9081403a8b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_template_update_request.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest + """ # noqa: E501 + name: Optional[StrictStr] = Field(default=None, description="DEPRECATED: This field is ignored.") + namespace: Optional[StrictStr] = None + template: Optional[IoArgoprojWorkflowV1alpha1WorkflowTemplate] = None + __properties: ClassVar[List[str]] = ["name", "namespace", "template"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of template + if self.template: + _dict['template'] = self.template.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "template": IoArgoprojWorkflowV1alpha1WorkflowTemplate.from_dict(obj["template"]) if obj.get("template") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py new file mode 100644 index 000000000000..cc6d6bcfba4a --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_terminate_request.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "namespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_watch_event.py b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_watch_event.py new file mode 100644 index 000000000000..25b0ab94cf81 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_argoproj_workflow_v1alpha1_workflow_watch_event.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from typing import Optional, Set +from typing_extensions import Self + +class IoArgoprojWorkflowV1alpha1WorkflowWatchEvent(BaseModel): + """ + IoArgoprojWorkflowV1alpha1WorkflowWatchEvent + """ # noqa: E501 + object: Optional[IoArgoprojWorkflowV1alpha1Workflow] = None + type: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["object", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of object + if self.object: + _dict['object'] = self.object.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "object": IoArgoprojWorkflowV1alpha1Workflow.from_dict(obj["object"]) if obj.get("object") is not None else None, + "type": obj.get("type") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/io_k8s_api_policy_v1_pod_disruption_budget_spec.py b/sdks/python/client/argo_workflows/models/io_k8s_api_policy_v1_pod_disruption_budget_spec.py new file mode 100644 index 000000000000..90c5a4ac29a0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/io_k8s_api_policy_v1_pod_disruption_budget_spec.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.label_selector import LabelSelector +from typing import Optional, Set +from typing_extensions import Self + +class IoK8sApiPolicyV1PodDisruptionBudgetSpec(BaseModel): + """ + PodDisruptionBudgetSpec is a description of a PodDisruptionBudget. + """ # noqa: E501 + max_unavailable: Optional[StrictStr] = Field(default=None, alias="maxUnavailable") + min_available: Optional[StrictStr] = Field(default=None, alias="minAvailable") + selector: Optional[LabelSelector] = None + __properties: ClassVar[List[str]] = ["maxUnavailable", "minAvailable", "selector"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of IoK8sApiPolicyV1PodDisruptionBudgetSpec from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of selector + if self.selector: + _dict['selector'] = self.selector.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of IoK8sApiPolicyV1PodDisruptionBudgetSpec from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "maxUnavailable": obj.get("maxUnavailable"), + "minAvailable": obj.get("minAvailable"), + "selector": LabelSelector.from_dict(obj["selector"]) if obj.get("selector") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/iscsi_volume_source.py b/sdks/python/client/argo_workflows/models/iscsi_volume_source.py new file mode 100644 index 000000000000..6ed6cd008267 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/iscsi_volume_source.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class ISCSIVolumeSource(BaseModel): + """ + Represents an ISCSI disk. 
ISCSI volumes can only be mounted as read/write once. ISCSI volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + chap_auth_discovery: Optional[StrictBool] = Field(default=None, description="whether support iSCSI Discovery CHAP authentication", alias="chapAuthDiscovery") + chap_auth_session: Optional[StrictBool] = Field(default=None, description="whether support iSCSI Session CHAP authentication", alias="chapAuthSession") + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi", alias="fsType") + initiator_name: Optional[StrictStr] = Field(default=None, description="Custom iSCSI Initiator Name. If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface : will be created for the connection.", alias="initiatorName") + iqn: StrictStr = Field(description="Target iSCSI Qualified Name.") + iscsi_interface: Optional[StrictStr] = Field(default=None, description="iSCSI Interface Name that uses an iSCSI transport. Defaults to 'default' (tcp).", alias="iscsiInterface") + lun: StrictInt = Field(description="iSCSI Target Lun number.") + portals: Optional[List[StrictStr]] = Field(default=None, description="iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260).") + read_only: Optional[StrictBool] = Field(default=None, description="ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false.", alias="readOnly") + secret_ref: Optional[LocalObjectReference] = Field(default=None, alias="secretRef") + target_portal: StrictStr = Field(description="iSCSI Target Portal. 
The Portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260).", alias="targetPortal") + __properties: ClassVar[List[str]] = ["chapAuthDiscovery", "chapAuthSession", "fsType", "initiatorName", "iqn", "iscsiInterface", "lun", "portals", "readOnly", "secretRef", "targetPortal"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ISCSIVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ISCSIVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "chapAuthDiscovery": obj.get("chapAuthDiscovery"), + "chapAuthSession": obj.get("chapAuthSession"), + "fsType": obj.get("fsType"), + "initiatorName": obj.get("initiatorName"), + "iqn": obj.get("iqn"), + "iscsiInterface": obj.get("iscsiInterface"), + "lun": obj.get("lun"), + "portals": obj.get("portals"), + "readOnly": obj.get("readOnly"), + "secretRef": LocalObjectReference.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None, + "targetPortal": obj.get("targetPortal") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/key_to_path.py b/sdks/python/client/argo_workflows/models/key_to_path.py new file mode 100644 index 000000000000..b177776a0928 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/key_to_path.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class KeyToPath(BaseModel): + """ + Maps a string key to a path within a volume. + """ # noqa: E501 + key: StrictStr = Field(description="The key to project.") + mode: Optional[StrictInt] = Field(default=None, description="Optional: mode bits used to set permissions on this file. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.") + path: StrictStr = Field(description="The relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'.") + __properties: ClassVar[List[str]] = ["key", "mode", "path"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of KeyToPath from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of KeyToPath from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "mode": obj.get("mode"), + "path": obj.get("path") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/label_selector.py b/sdks/python/client/argo_workflows/models/label_selector.py new file mode 100644 index 000000000000..69e609d70671 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/label_selector.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.label_selector_requirement import LabelSelectorRequirement +from typing import Optional, Set +from typing_extensions import Self + +class LabelSelector(BaseModel): + """ + A label selector is a label query over a set of resources. 
The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects. + """ # noqa: E501 + match_expressions: Optional[List[LabelSelectorRequirement]] = Field(default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", alias="matchExpressions") + match_labels: Optional[Dict[str, StrictStr]] = Field(default=None, description="matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed.", alias="matchLabels") + __properties: ClassVar[List[str]] = ["matchExpressions", "matchLabels"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LabelSelector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in match_expressions (list) + _items = [] + if self.match_expressions: + for _item in self.match_expressions: + if _item: + _items.append(_item.to_dict()) + _dict['matchExpressions'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LabelSelector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "matchExpressions": [LabelSelectorRequirement.from_dict(_item) for _item in obj["matchExpressions"]] if obj.get("matchExpressions") is not None else None, + "matchLabels": obj.get("matchLabels") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/label_selector_requirement.py b/sdks/python/client/argo_workflows/models/label_selector_requirement.py new file mode 100644 index 000000000000..265c0482f05f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/label_selector_requirement.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class LabelSelectorRequirement(BaseModel): + """ + A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. + """ # noqa: E501 + key: StrictStr = Field(description="key is the label key that the selector applies to.") + operator: StrictStr = Field(description="operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.") + values: Optional[List[StrictStr]] = Field(default=None, description="values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.") + __properties: ClassVar[List[str]] = ["key", "operator", "values"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LabelSelectorRequirement from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LabelSelectorRequirement from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "operator": obj.get("operator"), + "values": obj.get("values") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/lifecycle.py b/sdks/python/client/argo_workflows/models/lifecycle.py new file mode 100644 index 000000000000..41ad6821e160 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/lifecycle.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.lifecycle_handler import LifecycleHandler +from typing import Optional, Set +from typing_extensions import Self + +class Lifecycle(BaseModel): + """ + Lifecycle describes actions that the management system should take in response to container lifecycle events. 
For the PostStart and PreStop lifecycle handlers, management of the container blocks until the action is complete, unless the container process fails, in which case the handler is aborted. + """ # noqa: E501 + post_start: Optional[LifecycleHandler] = Field(default=None, alias="postStart") + pre_stop: Optional[LifecycleHandler] = Field(default=None, alias="preStop") + __properties: ClassVar[List[str]] = ["postStart", "preStop"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Lifecycle from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of post_start + if self.post_start: + _dict['postStart'] = self.post_start.to_dict() + # override the default output from pydantic by calling `to_dict()` of pre_stop + if self.pre_stop: + _dict['preStop'] = self.pre_stop.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Lifecycle from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "postStart": LifecycleHandler.from_dict(obj["postStart"]) if obj.get("postStart") is not None else None, + "preStop": LifecycleHandler.from_dict(obj["preStop"]) if obj.get("preStop") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/lifecycle_handler.py b/sdks/python/client/argo_workflows/models/lifecycle_handler.py new file mode 100644 index 000000000000..65c8858e5f1b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/lifecycle_handler.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.exec_action import ExecAction +from argo_workflows.models.http_get_action import HTTPGetAction +from argo_workflows.models.tcp_socket_action import TCPSocketAction +from typing import Optional, Set +from typing_extensions import Self + +class LifecycleHandler(BaseModel): + """ + LifecycleHandler defines a specific action that should be taken in a lifecycle hook. One and only one of the fields, except TCPSocket must be specified. + """ # noqa: E501 + var_exec: Optional[ExecAction] = Field(default=None, alias="exec") + http_get: Optional[HTTPGetAction] = Field(default=None, alias="httpGet") + tcp_socket: Optional[TCPSocketAction] = Field(default=None, alias="tcpSocket") + __properties: ClassVar[List[str]] = ["exec", "httpGet", "tcpSocket"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LifecycleHandler from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of var_exec + if self.var_exec: + _dict['exec'] = self.var_exec.to_dict() + # override the default output from pydantic by calling `to_dict()` of http_get + if self.http_get: + _dict['httpGet'] = self.http_get.to_dict() + # override the default output from pydantic by calling `to_dict()` of tcp_socket + if self.tcp_socket: + _dict['tcpSocket'] = self.tcp_socket.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LifecycleHandler from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "exec": ExecAction.from_dict(obj["exec"]) if obj.get("exec") is not None else None, + "httpGet": HTTPGetAction.from_dict(obj["httpGet"]) if obj.get("httpGet") is not None else None, + "tcpSocket": TCPSocketAction.from_dict(obj["tcpSocket"]) if obj.get("tcpSocket") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/list_meta.py b/sdks/python/client/argo_workflows/models/list_meta.py new file mode 100644 index 000000000000..ebe5e31c178c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/list_meta.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ListMeta(BaseModel): + """ + ListMeta describes metadata that synthetic resources must have, including lists and various status objects. A resource may have only one of {ObjectMeta, ListMeta}. + """ # noqa: E501 + var_continue: Optional[StrictStr] = Field(default=None, description="continue may be set if the user set a limit on the number of items returned, and indicates that the server has more data available. The value is opaque and may be used to issue another request to the endpoint that served this list to retrieve the next set of available objects. Continuing a consistent list may not be possible if the server configuration has changed or more than a few minutes have passed. The resourceVersion field returned when using this continue value will be identical to the value in the first response, unless you have received this token from an error message.", alias="continue") + remaining_item_count: Optional[StrictInt] = Field(default=None, description="remainingItemCount is the number of subsequent items in the list which are not included in this list response. If the list request contained label or field selectors, then the number of remaining items is unknown and the field will be left unset and omitted during serialization. If the list is complete (either because it is not chunking or because this is the last chunk), then there are no more remaining items and this field will be left unset and omitted during serialization. Servers older than v1.15 do not set this field. The intended use of the remainingItemCount is *estimating* the size of a collection. 
Clients should not rely on the remainingItemCount to be set or to be exact.", alias="remainingItemCount") + resource_version: Optional[StrictStr] = Field(default=None, description="String that identifies the server's internal version of this object that can be used by clients to determine when objects have changed. Value must be treated as opaque by clients and passed unmodified back to the server. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency", alias="resourceVersion") + self_link: Optional[StrictStr] = Field(default=None, description="selfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release.", alias="selfLink") + __properties: ClassVar[List[str]] = ["continue", "remainingItemCount", "resourceVersion", "selfLink"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ListMeta from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ListMeta from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "continue": obj.get("continue"), + "remainingItemCount": obj.get("remainingItemCount"), + "resourceVersion": obj.get("resourceVersion"), + "selfLink": obj.get("selfLink") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/local_object_reference.py b/sdks/python/client/argo_workflows/models/local_object_reference.py new file mode 100644 index 000000000000..64c3bf854792 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/local_object_reference.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class LocalObjectReference(BaseModel): + """ + LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. + """ # noqa: E501 + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + __properties: ClassVar[List[str]] = ["name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LocalObjectReference from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LocalObjectReference from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/managed_fields_entry.py b/sdks/python/client/argo_workflows/models/managed_fields_entry.py new file mode 100644 index 000000000000..c650fe7fc7f3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/managed_fields_entry.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ManagedFieldsEntry(BaseModel): + """ + ManagedFieldsEntry is a workflow-id, a FieldSet and the group version of the resource that the fieldset applies to. + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the version of this resource that this field set applies to. The format is \"group/version\" just like the top-level APIVersion field. 
It is necessary to track the version of a field set because it cannot be automatically converted.", alias="apiVersion") + fields_type: Optional[StrictStr] = Field(default=None, description="FieldsType is the discriminator for the different fields format and version. There is currently only one possible value: \"FieldsV1\"", alias="fieldsType") + fields_v1: Optional[Dict[str, Any]] = Field(default=None, description="FieldsV1 stores a set of fields in a data structure like a Trie, in JSON format. Each key is either a '.' representing the field itself, and will always map to an empty set, or a string representing a sub-field or item. The string will follow one of these four formats: 'f:', where is the name of a field in a struct, or key in a map 'v:', where is the exact json formatted value of a list item 'i:', where is position of a item in a list 'k:', where is a map of a list item's key fields to their unique values If a key maps to an empty Fields value, the field that key represents is part of the set. The exact format is defined in sigs.k8s.io/structured-merge-diff", alias="fieldsV1") + manager: Optional[StrictStr] = Field(default=None, description="Manager is an identifier of the workflow managing these fields.") + operation: Optional[StrictStr] = Field(default=None, description="Operation is the type of operation which lead to this ManagedFieldsEntry being created. The only valid values for this field are 'Apply' and 'Update'.") + subresource: Optional[StrictStr] = Field(default=None, description="Subresource is the name of the subresource used to update that object, or empty string if the object was updated through the main resource. The value of this field is used to distinguish between managers, even if they share the same name. For example, a status update will be distinct from a regular update using the same manager name. 
Note that the APIVersion field is not related to the Subresource field and it always corresponds to the version of the main resource.") + time: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.") + __properties: ClassVar[List[str]] = ["apiVersion", "fieldsType", "fieldsV1", "manager", "operation", "subresource", "time"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ManagedFieldsEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ManagedFieldsEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "fieldsType": obj.get("fieldsType"), + "fieldsV1": obj.get("fieldsV1"), + "manager": obj.get("manager"), + "operation": obj.get("operation"), + "subresource": obj.get("subresource"), + "time": obj.get("time") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/nfs_volume_source.py b/sdks/python/client/argo_workflows/models/nfs_volume_source.py new file mode 100644 index 000000000000..2e1a81ba98ae --- /dev/null +++ b/sdks/python/client/argo_workflows/models/nfs_volume_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NFSVolumeSource(BaseModel): + """ + Represents an NFS mount that lasts the lifetime of a pod. NFS volumes do not support ownership management or SELinux relabeling. + """ # noqa: E501 + path: StrictStr = Field(description="Path that is exported by the NFS server. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs") + read_only: Optional[StrictBool] = Field(default=None, description="ReadOnly here will force the NFS export to be mounted with read-only permissions. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs", alias="readOnly") + server: StrictStr = Field(description="Server is the hostname or IP address of the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs") + __properties: ClassVar[List[str]] = ["path", "readOnly", "server"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NFSVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NFSVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "path": obj.get("path"), + "readOnly": obj.get("readOnly"), + "server": obj.get("server") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/node_affinity.py b/sdks/python/client/argo_workflows/models/node_affinity.py new file mode 100644 index 000000000000..75e8f3a85d74 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/node_affinity.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.node_selector import NodeSelector +from argo_workflows.models.preferred_scheduling_term import PreferredSchedulingTerm +from typing import Optional, Set +from typing_extensions import Self + +class NodeAffinity(BaseModel): + """ + Node affinity is a group of node affinity scheduling rules. 
+ """ # noqa: E501 + preferred_during_scheduling_ignored_during_execution: Optional[List[PreferredSchedulingTerm]] = Field(default=None, description="The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred.", alias="preferredDuringSchedulingIgnoredDuringExecution") + required_during_scheduling_ignored_during_execution: Optional[NodeSelector] = Field(default=None, alias="requiredDuringSchedulingIgnoredDuringExecution") + __properties: ClassVar[List[str]] = ["preferredDuringSchedulingIgnoredDuringExecution", "requiredDuringSchedulingIgnoredDuringExecution"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeAffinity from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in preferred_during_scheduling_ignored_during_execution (list) + _items = [] + if self.preferred_during_scheduling_ignored_during_execution: + for _item in self.preferred_during_scheduling_ignored_during_execution: + if _item: + _items.append(_item.to_dict()) + _dict['preferredDuringSchedulingIgnoredDuringExecution'] = _items + # override the default output from pydantic by calling `to_dict()` of required_during_scheduling_ignored_during_execution + if self.required_during_scheduling_ignored_during_execution: + _dict['requiredDuringSchedulingIgnoredDuringExecution'] = self.required_during_scheduling_ignored_during_execution.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeAffinity from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "preferredDuringSchedulingIgnoredDuringExecution": [PreferredSchedulingTerm.from_dict(_item) for _item in obj["preferredDuringSchedulingIgnoredDuringExecution"]] if obj.get("preferredDuringSchedulingIgnoredDuringExecution") is not None else None, + "requiredDuringSchedulingIgnoredDuringExecution": NodeSelector.from_dict(obj["requiredDuringSchedulingIgnoredDuringExecution"]) if obj.get("requiredDuringSchedulingIgnoredDuringExecution") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/node_selector.py 
b/sdks/python/client/argo_workflows/models/node_selector.py new file mode 100644 index 000000000000..86f50ee0c893 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/node_selector.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.node_selector_term import NodeSelectorTerm +from typing import Optional, Set +from typing_extensions import Self + +class NodeSelector(BaseModel): + """ + A node selector represents the union of the results of one or more label queries over a set of nodes; that is, it represents the OR of the selectors represented by the node selector terms. + """ # noqa: E501 + node_selector_terms: List[NodeSelectorTerm] = Field(description="Required. A list of node selector terms. 
The terms are ORed.", alias="nodeSelectorTerms") + __properties: ClassVar[List[str]] = ["nodeSelectorTerms"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeSelector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in node_selector_terms (list) + _items = [] + if self.node_selector_terms: + for _item in self.node_selector_terms: + if _item: + _items.append(_item.to_dict()) + _dict['nodeSelectorTerms'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeSelector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeSelectorTerms": [NodeSelectorTerm.from_dict(_item) for _item in obj["nodeSelectorTerms"]] if obj.get("nodeSelectorTerms") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/node_selector_requirement.py b/sdks/python/client/argo_workflows/models/node_selector_requirement.py new file mode 100644 index 000000000000..133349226044 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/node_selector_requirement.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeSelectorRequirement(BaseModel): + """ + A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. + """ # noqa: E501 + key: StrictStr = Field(description="The label key that the selector applies to.") + operator: StrictStr = Field(description="Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. Possible enum values: - `\"DoesNotExist\"` - `\"Exists\"` - `\"Gt\"` - `\"In\"` - `\"Lt\"` - `\"NotIn\"`") + values: Optional[List[StrictStr]] = Field(default=None, description="An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. 
This array is replaced during a strategic merge patch.") + __properties: ClassVar[List[str]] = ["key", "operator", "values"] + + @field_validator('operator') + def operator_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DoesNotExist', 'Exists', 'Gt', 'In', 'Lt', 'NotIn']): + raise ValueError("must be one of enum values ('DoesNotExist', 'Exists', 'Gt', 'In', 'Lt', 'NotIn')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeSelectorRequirement from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeSelectorRequirement from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "operator": obj.get("operator"), + "values": obj.get("values") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/node_selector_term.py b/sdks/python/client/argo_workflows/models/node_selector_term.py new file mode 100644 index 000000000000..f08a196f9caf --- /dev/null +++ b/sdks/python/client/argo_workflows/models/node_selector_term.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.node_selector_requirement import NodeSelectorRequirement +from typing import Optional, Set +from typing_extensions import Self + +class NodeSelectorTerm(BaseModel): + """ + A null or empty node selector term matches no objects. The requirements of them are ANDed. The TopologySelectorTerm type implements a subset of the NodeSelectorTerm. 
+ """ # noqa: E501 + match_expressions: Optional[List[NodeSelectorRequirement]] = Field(default=None, description="A list of node selector requirements by node's labels.", alias="matchExpressions") + match_fields: Optional[List[NodeSelectorRequirement]] = Field(default=None, description="A list of node selector requirements by node's fields.", alias="matchFields") + __properties: ClassVar[List[str]] = ["matchExpressions", "matchFields"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeSelectorTerm from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in match_expressions (list) + _items = [] + if self.match_expressions: + for _item in self.match_expressions: + if _item: + _items.append(_item.to_dict()) + _dict['matchExpressions'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in match_fields (list) + _items = [] + if self.match_fields: + for _item in self.match_fields: + if _item: + _items.append(_item.to_dict()) + _dict['matchFields'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeSelectorTerm from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "matchExpressions": [NodeSelectorRequirement.from_dict(_item) for _item in obj["matchExpressions"]] if obj.get("matchExpressions") is not None else None, + "matchFields": [NodeSelectorRequirement.from_dict(_item) for _item in obj["matchFields"]] if obj.get("matchFields") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/object_field_selector.py b/sdks/python/client/argo_workflows/models/object_field_selector.py new file mode 100644 index 000000000000..ed4271e9d29c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/object_field_selector.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ObjectFieldSelector(BaseModel): + """ + ObjectFieldSelector selects an APIVersioned field of an object. + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="Version of the schema the FieldPath is written in terms of, defaults to \"v1\".", alias="apiVersion") + field_path: StrictStr = Field(description="Path of the field to select in the specified API version.", alias="fieldPath") + __properties: ClassVar[List[str]] = ["apiVersion", "fieldPath"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ObjectFieldSelector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ObjectFieldSelector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "fieldPath": obj.get("fieldPath") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/object_meta.py b/sdks/python/client/argo_workflows/models/object_meta.py new file mode 100644 index 000000000000..4d85729b2dd9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/object_meta.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.managed_fields_entry import ManagedFieldsEntry +from argo_workflows.models.owner_reference import OwnerReference +from typing import Optional, Set +from typing_extensions import Self + +class ObjectMeta(BaseModel): + """ + ObjectMeta is metadata that all persisted resources must have, which includes all objects users must create. 
+ """ # noqa: E501 + annotations: Optional[Dict[str, StrictStr]] = Field(default=None, description="Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations") + cluster_name: Optional[StrictStr] = Field(default=None, description="The name of the cluster which the object belongs to. This is used to distinguish resources with same name and namespace in different clusters. This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request.", alias="clusterName") + creation_timestamp: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="creationTimestamp") + deletion_grace_period_seconds: Optional[StrictInt] = Field(default=None, description="Number of seconds allowed for this object to gracefully terminate before it will be removed from the system. Only set when deletionTimestamp is also set. May only be shortened. Read-only.", alias="deletionGracePeriodSeconds") + deletion_timestamp: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="deletionTimestamp") + finalizers: Optional[List[StrictStr]] = Field(default=None, description="Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed. Finalizers may be processed and removed in any order. 
Order is NOT enforced because it introduces significant risk of stuck finalizers. finalizers is a shared field, any actor with permission can reorder it. If the finalizer list is processed in order, then this can lead to a situation in which the component responsible for the first finalizer in the list is waiting for a signal (field value, external system, or other) produced by a component responsible for a finalizer later in the list, resulting in a deadlock. Without enforced ordering finalizers are free to order amongst themselves and are not vulnerable to ordering changes in the list.") + generate_name: Optional[StrictStr] = Field(default=None, description="GenerateName is an optional prefix, used by the server, to generate a unique name ONLY IF the Name field has not been provided. If this field is used, the name returned to the client will be different than the name passed. This value will also be combined with a unique suffix. The provided value has the same validation rules as the Name field, and may be truncated by the length of the suffix required to make the value unique on the server. If this field is specified and the generated name exists, the server will NOT return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout indicating a unique name could not be found in the time allotted, and the client should retry (optionally after the time indicated in the Retry-After header). Applied only if Name is not specified. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#idempotency", alias="generateName") + generation: Optional[StrictInt] = Field(default=None, description="A sequence number representing a specific generation of the desired state. Populated by the system. Read-only.") + labels: Optional[Dict[str, StrictStr]] = Field(default=None, description="Map of string keys and values that can be used to organize and categorize (scope and select) objects. 
May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels") + managed_fields: Optional[List[ManagedFieldsEntry]] = Field(default=None, description="ManagedFields maps workflow-id and version to the set of fields that are managed by that workflow. This is mostly for internal housekeeping, and users typically shouldn't need to set or understand this field. A workflow can be the user's name, a controller's name, or the name of a specific apply path like \"ci-cd\". The set of fields is always in the version that the workflow used when modifying the object.", alias="managedFields") + name: Optional[StrictStr] = Field(default=None, description="Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names") + namespace: Optional[StrictStr] = Field(default=None, description="Namespace defines the space within which each name must be unique. An empty namespace is equivalent to the \"default\" namespace, but \"default\" is the canonical representation. Not all objects are required to be scoped to a namespace - the value of this field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/namespaces") + owner_references: Optional[List[OwnerReference]] = Field(default=None, description="List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. 
There cannot be more than one managing controller.", alias="ownerReferences") + resource_version: Optional[StrictStr] = Field(default=None, description="An opaque value that represents the internal version of this object that can be used by clients to determine when objects have changed. May be used for optimistic concurrency, change detection, and the watch operation on a resource or set of resources. Clients must treat these values as opaque and passed unmodified back to the server. They may only be valid for a particular resource or set of resources. Populated by the system. Read-only. Value must be treated as opaque by clients and . More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency", alias="resourceVersion") + self_link: Optional[StrictStr] = Field(default=None, description="SelfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release.", alias="selfLink") + uid: Optional[StrictStr] = Field(default=None, description="UID is the unique in time and space value for this object. It is typically generated by the server on successful creation of a resource and is not allowed to change on PUT operations. Populated by the system. Read-only. 
More info: http://kubernetes.io/docs/user-guide/identifiers#uids") + __properties: ClassVar[List[str]] = ["annotations", "clusterName", "creationTimestamp", "deletionGracePeriodSeconds", "deletionTimestamp", "finalizers", "generateName", "generation", "labels", "managedFields", "name", "namespace", "ownerReferences", "resourceVersion", "selfLink", "uid"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ObjectMeta from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in managed_fields (list) + _items = [] + if self.managed_fields: + for _item in self.managed_fields: + if _item: + _items.append(_item.to_dict()) + _dict['managedFields'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in owner_references (list) + _items = [] + if self.owner_references: + for _item in self.owner_references: + if _item: + _items.append(_item.to_dict()) + _dict['ownerReferences'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ObjectMeta from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "annotations": obj.get("annotations"), + "clusterName": obj.get("clusterName"), + "creationTimestamp": obj.get("creationTimestamp"), + "deletionGracePeriodSeconds": obj.get("deletionGracePeriodSeconds"), + "deletionTimestamp": obj.get("deletionTimestamp"), + "finalizers": obj.get("finalizers"), + "generateName": obj.get("generateName"), + "generation": obj.get("generation"), + "labels": obj.get("labels"), + "managedFields": [ManagedFieldsEntry.from_dict(_item) for _item in obj["managedFields"]] if obj.get("managedFields") is not None else None, + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "ownerReferences": [OwnerReference.from_dict(_item) for _item in obj["ownerReferences"]] if obj.get("ownerReferences") is not None else None, + "resourceVersion": obj.get("resourceVersion"), + "selfLink": obj.get("selfLink"), + "uid": obj.get("uid") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/object_reference.py b/sdks/python/client/argo_workflows/models/object_reference.py new 
file mode 100644 index 000000000000..12e91ff76b16 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/object_reference.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ObjectReference(BaseModel): + """ + ObjectReference contains enough information to let you inspect or modify the referred object. + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="API version of the referent.", alias="apiVersion") + field_path: Optional[StrictStr] = Field(default=None, description="If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: \"spec.containers{name}\" (where \"name\" refers to the name of the container that triggered the event) or if no container name is specified \"spec.containers[2]\" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object.", alias="fieldPath") + kind: Optional[StrictStr] = Field(default=None, description="Kind of the referent. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + namespace: Optional[StrictStr] = Field(default=None, description="Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/") + resource_version: Optional[StrictStr] = Field(default=None, description="Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency", alias="resourceVersion") + uid: Optional[StrictStr] = Field(default=None, description="UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids") + __properties: ClassVar[List[str]] = ["apiVersion", "fieldPath", "kind", "name", "namespace", "resourceVersion", "uid"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ObjectReference from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ObjectReference from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "fieldPath": obj.get("fieldPath"), + "kind": obj.get("kind"), + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "resourceVersion": obj.get("resourceVersion"), + "uid": obj.get("uid") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/owner_reference.py b/sdks/python/client/argo_workflows/models/owner_reference.py new file mode 100644 index 000000000000..bfd6ccf5e8f4 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/owner_reference.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class OwnerReference(BaseModel): + """ + OwnerReference contains enough information to let you identify an owning object. An owning object must be in the same namespace as the dependent, or be cluster-scoped, so there is no namespace field. + """ # noqa: E501 + api_version: StrictStr = Field(description="API version of the referent.", alias="apiVersion") + block_owner_deletion: Optional[StrictBool] = Field(default=None, description="If true, AND if the owner has the \"foregroundDeletion\" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. Defaults to false. To set this field, a user needs \"delete\" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned.", alias="blockOwnerDeletion") + controller: Optional[StrictBool] = Field(default=None, description="If true, this reference points to the managing controller.") + kind: StrictStr = Field(description="Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + name: StrictStr = Field(description="Name of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#names") + uid: StrictStr = Field(description="UID of the referent. 
More info: http://kubernetes.io/docs/user-guide/identifiers#uids") + __properties: ClassVar[List[str]] = ["apiVersion", "blockOwnerDeletion", "controller", "kind", "name", "uid"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OwnerReference from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OwnerReference from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "blockOwnerDeletion": obj.get("blockOwnerDeletion"), + "controller": obj.get("controller"), + "kind": obj.get("kind"), + "name": obj.get("name"), + "uid": obj.get("uid") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/persistent_volume_claim.py b/sdks/python/client/argo_workflows/models/persistent_volume_claim.py new file mode 100644 index 000000000000..eb247986c332 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/persistent_volume_claim.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.object_meta import ObjectMeta +from argo_workflows.models.persistent_volume_claim_spec import PersistentVolumeClaimSpec +from argo_workflows.models.persistent_volume_claim_status import PersistentVolumeClaimStatus +from typing import Optional, Set +from typing_extensions import Self + +class PersistentVolumeClaim(BaseModel): + """ + PersistentVolumeClaim is a user's request for and claim to a persistent volume + """ # noqa: E501 + api_version: Optional[StrictStr] = Field(default=None, description="APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", alias="apiVersion") + kind: Optional[StrictStr] = Field(default=None, description="Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds") + metadata: Optional[ObjectMeta] = None + spec: Optional[PersistentVolumeClaimSpec] = None + status: Optional[PersistentVolumeClaimStatus] = None + __properties: ClassVar[List[str]] = ["apiVersion", "kind", "metadata", "spec", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PersistentVolumeClaim from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of spec + if self.spec: + _dict['spec'] = self.spec.to_dict() + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PersistentVolumeClaim from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiVersion": obj.get("apiVersion"), + "kind": obj.get("kind"), + "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, + "spec": PersistentVolumeClaimSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None, + "status": PersistentVolumeClaimStatus.from_dict(obj["status"]) if obj.get("status") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/persistent_volume_claim_condition.py b/sdks/python/client/argo_workflows/models/persistent_volume_claim_condition.py new file mode 100644 index 000000000000..79e07c604cb1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/persistent_volume_claim_condition.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
class PersistentVolumeClaimCondition(BaseModel):
    """
    PersistentVolumeClaimCondition contains details about state of pvc
    """ # noqa: E501
    last_probe_time: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="lastProbeTime")
    last_transition_time: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.", alias="lastTransitionTime")
    message: Optional[StrictStr] = Field(default=None, description="Human-readable message indicating details about last transition.")
    reason: Optional[StrictStr] = Field(default=None, description="Unique, this should be a short, machine understandable string that gives the reason for condition's last transition. If it reports \"ResizeStarted\" that means the underlying persistent volume is being resized.")
    status: StrictStr
    type: StrictStr = Field(description=" Possible enum values: - `\"FileSystemResizePending\"` - controller resize is finished and a file system resize is pending on node - `\"Resizing\"` - a user trigger resize of pvc has been started")
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["lastProbeTime", "lastTransitionTime", "message", "reason", "status", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Validates the enum"""
        # Reject anything outside the enum values declared in the OpenAPI document.
        if value not in set(['FileSystemResizePending', 'Resizing']):
            raise ValueError("must be one of enum values ('FileSystemResizePending', 'Resizing')")
        return value

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimCondition from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimCondition from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "lastProbeTime": obj.get("lastProbeTime"),
            "lastTransitionTime": obj.get("lastTransitionTime"),
            "message": obj.get("message"),
            "reason": obj.get("reason"),
            "status": obj.get("status"),
            "type": obj.get("type")
        })
        return _obj
class PersistentVolumeClaimSpec(BaseModel):
    """
    PersistentVolumeClaimSpec describes the common attributes of storage devices and allows a Source for provider-specific attributes
    """ # noqa: E501
    access_modes: Optional[List[StrictStr]] = Field(default=None, description="AccessModes contains the desired access modes the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1", alias="accessModes")
    data_source: Optional[TypedLocalObjectReference] = Field(default=None, alias="dataSource")
    data_source_ref: Optional[TypedLocalObjectReference] = Field(default=None, alias="dataSourceRef")
    resources: Optional[ResourceRequirements] = None
    selector: Optional[LabelSelector] = None
    storage_class_name: Optional[StrictStr] = Field(default=None, description="Name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1", alias="storageClassName")
    volume_mode: Optional[StrictStr] = Field(default=None, description="volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.", alias="volumeMode")
    volume_name: Optional[StrictStr] = Field(default=None, description="VolumeName is the binding reference to the PersistentVolume backing this claim.", alias="volumeName")
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["accessModes", "dataSource", "dataSourceRef", "resources", "selector", "storageClassName", "volumeMode", "volumeName"]

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimSpec from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of data_source
        if self.data_source:
            _dict['dataSource'] = self.data_source.to_dict()
        # override the default output from pydantic by calling `to_dict()` of data_source_ref
        if self.data_source_ref:
            _dict['dataSourceRef'] = self.data_source_ref.to_dict()
        # override the default output from pydantic by calling `to_dict()` of resources
        if self.resources:
            _dict['resources'] = self.resources.to_dict()
        # override the default output from pydantic by calling `to_dict()` of selector
        if self.selector:
            _dict['selector'] = self.selector.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimSpec from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "accessModes": obj.get("accessModes"),
            "dataSource": TypedLocalObjectReference.from_dict(obj["dataSource"]) if obj.get("dataSource") is not None else None,
            "dataSourceRef": TypedLocalObjectReference.from_dict(obj["dataSourceRef"]) if obj.get("dataSourceRef") is not None else None,
            "resources": ResourceRequirements.from_dict(obj["resources"]) if obj.get("resources") is not None else None,
            "selector": LabelSelector.from_dict(obj["selector"]) if obj.get("selector") is not None else None,
            "storageClassName": obj.get("storageClassName"),
            "volumeMode": obj.get("volumeMode"),
            "volumeName": obj.get("volumeName")
        })
        return _obj
class PersistentVolumeClaimStatus(BaseModel):
    """
    PersistentVolumeClaimStatus is the current status of a persistent volume claim.
    """ # noqa: E501
    access_modes: Optional[List[StrictStr]] = Field(default=None, description="AccessModes contains the actual access modes the volume backing the PVC has. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1", alias="accessModes")
    allocated_resources: Optional[Dict[str, StrictStr]] = Field(default=None, description="The storage resource within AllocatedResources tracks the capacity allocated to a PVC. It may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.", alias="allocatedResources")
    capacity: Optional[Dict[str, StrictStr]] = Field(default=None, description="Represents the actual resources of the underlying volume.")
    conditions: Optional[List[PersistentVolumeClaimCondition]] = Field(default=None, description="Current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'ResizeStarted'.")
    phase: Optional[StrictStr] = Field(default=None, description="Phase represents the current phase of PersistentVolumeClaim. Possible enum values: - `\"Bound\"` used for PersistentVolumeClaims that are bound - `\"Lost\"` used for PersistentVolumeClaims that lost their underlying PersistentVolume. The claim was bound to a PersistentVolume and this volume does not exist any longer and all data on it was lost. - `\"Pending\"` used for PersistentVolumeClaims that are not yet bound")
    resize_status: Optional[StrictStr] = Field(default=None, description="ResizeStatus stores status of resize operation. ResizeStatus is not set by default but when expansion is complete resizeStatus is set to empty string by resize controller or kubelet. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.", alias="resizeStatus")
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["accessModes", "allocatedResources", "capacity", "conditions", "phase", "resizeStatus"]

    @field_validator('phase')
    def phase_validate_enum(cls, value):
        """Validates the enum"""
        # phase is optional; only validate when a value is present.
        if value is None:
            return value

        # Reject anything outside the enum values declared in the OpenAPI document.
        if value not in set(['Bound', 'Lost', 'Pending']):
            raise ValueError("must be one of enum values ('Bound', 'Lost', 'Pending')")
        return value

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimStatus from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of each item in conditions (list)
        _items = []
        if self.conditions:
            for _item in self.conditions:
                if _item:
                    _items.append(_item.to_dict())
            _dict['conditions'] = _items
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimStatus from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "accessModes": obj.get("accessModes"),
            "allocatedResources": obj.get("allocatedResources"),
            "capacity": obj.get("capacity"),
            "conditions": [PersistentVolumeClaimCondition.from_dict(_item) for _item in obj["conditions"]] if obj.get("conditions") is not None else None,
            "phase": obj.get("phase"),
            "resizeStatus": obj.get("resizeStatus")
        })
        return _obj
class PersistentVolumeClaimTemplate(BaseModel):
    """
    PersistentVolumeClaimTemplate is used to produce PersistentVolumeClaim objects as part of an EphemeralVolumeSource.
    """ # noqa: E501
    metadata: Optional[ObjectMeta] = None
    # spec is the only required field of this model.
    spec: PersistentVolumeClaimSpec
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["metadata", "spec"]

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimTemplate from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of metadata
        if self.metadata:
            _dict['metadata'] = self.metadata.to_dict()
        # override the default output from pydantic by calling `to_dict()` of spec
        if self.spec:
            _dict['spec'] = self.spec.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimTemplate from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "metadata": ObjectMeta.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None,
            "spec": PersistentVolumeClaimSpec.from_dict(obj["spec"]) if obj.get("spec") is not None else None
        })
        return _obj
class PersistentVolumeClaimVolumeSource(BaseModel):
    """
    PersistentVolumeClaimVolumeSource references the user's PVC in the same namespace. This volume finds the bound PV and mounts that volume for the pod. A PersistentVolumeClaimVolumeSource is, essentially, a wrapper around another type of volume that is owned by someone else (the system).
    """ # noqa: E501
    # claim_name is the only required field of this model.
    claim_name: StrictStr = Field(description="ClaimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims", alias="claimName")
    read_only: Optional[StrictBool] = Field(default=None, description="Will force the ReadOnly setting in VolumeMounts. Default false.", alias="readOnly")
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["claimName", "readOnly"]

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimVolumeSource from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PersistentVolumeClaimVolumeSource from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "claimName": obj.get("claimName"),
            "readOnly": obj.get("readOnly")
        })
        return _obj
class PhotonPersistentDiskVolumeSource(BaseModel):
    """
    Represents a Photon Controller persistent disk resource.
    """ # noqa: E501
    fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified.", alias="fsType")
    # pd_id is the only required field of this model.
    pd_id: StrictStr = Field(description="ID that identifies Photon Controller persistent disk", alias="pdID")
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["fsType", "pdID"]

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PhotonPersistentDiskVolumeSource from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PhotonPersistentDiskVolumeSource from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "fsType": obj.get("fsType"),
            "pdID": obj.get("pdID")
        })
        return _obj
class PodAffinity(BaseModel):
    """
    Pod affinity is a group of inter pod affinity scheduling rules.
    """ # noqa: E501
    preferred_during_scheduling_ignored_during_execution: Optional[List[WeightedPodAffinityTerm]] = Field(default=None, description="The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.", alias="preferredDuringSchedulingIgnoredDuringExecution")
    required_during_scheduling_ignored_during_execution: Optional[List[PodAffinityTerm]] = Field(default=None, description="If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied.", alias="requiredDuringSchedulingIgnoredDuringExecution")
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["preferredDuringSchedulingIgnoredDuringExecution", "requiredDuringSchedulingIgnoredDuringExecution"]

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PodAffinity from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of each item in preferred_during_scheduling_ignored_during_execution (list)
        _items = []
        if self.preferred_during_scheduling_ignored_during_execution:
            for _item in self.preferred_during_scheduling_ignored_during_execution:
                if _item:
                    _items.append(_item.to_dict())
            _dict['preferredDuringSchedulingIgnoredDuringExecution'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in required_during_scheduling_ignored_during_execution (list)
        _items = []
        if self.required_during_scheduling_ignored_during_execution:
            for _item in self.required_during_scheduling_ignored_during_execution:
                if _item:
                    _items.append(_item.to_dict())
            _dict['requiredDuringSchedulingIgnoredDuringExecution'] = _items
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PodAffinity from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "preferredDuringSchedulingIgnoredDuringExecution": [WeightedPodAffinityTerm.from_dict(_item) for _item in obj["preferredDuringSchedulingIgnoredDuringExecution"]] if obj.get("preferredDuringSchedulingIgnoredDuringExecution") is not None else None,
            "requiredDuringSchedulingIgnoredDuringExecution": [PodAffinityTerm.from_dict(_item) for _item in obj["requiredDuringSchedulingIgnoredDuringExecution"]] if obj.get("requiredDuringSchedulingIgnoredDuringExecution") is not None else None
        })
        return _obj
class PodAffinityTerm(BaseModel):
    """
    Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running
    """ # noqa: E501
    label_selector: Optional[LabelSelector] = Field(default=None, alias="labelSelector")
    namespace_selector: Optional[LabelSelector] = Field(default=None, alias="namespaceSelector")
    namespaces: Optional[List[StrictStr]] = Field(default=None, description="namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\"")
    # topology_key is the only required field of this model.
    topology_key: StrictStr = Field(description="This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed.", alias="topologyKey")
    # JSON (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["labelSelector", "namespaceSelector", "namespaces", "topologyKey"]

    # Accept either python field names or JSON aliases on input; re-validate on assignment.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of PodAffinityTerm from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded from serialization for this model.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of label_selector
        if self.label_selector:
            _dict['labelSelector'] = self.label_selector.to_dict()
        # override the default output from pydantic by calling `to_dict()` of namespace_selector
        if self.namespace_selector:
            _dict['namespaceSelector'] = self.namespace_selector.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of PodAffinityTerm from a dict"""
        if obj is None:
            return None

        # Non-dict input is handed straight to pydantic validation.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "labelSelector": LabelSelector.from_dict(obj["labelSelector"]) if obj.get("labelSelector") is not None else None,
            "namespaceSelector": LabelSelector.from_dict(obj["namespaceSelector"]) if obj.get("namespaceSelector") is not None else None,
            "namespaces": obj.get("namespaces"),
            "topologyKey": obj.get("topologyKey")
        })
        return _obj
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.pod_affinity_term import PodAffinityTerm +from argo_workflows.models.weighted_pod_affinity_term import WeightedPodAffinityTerm +from typing import Optional, Set +from typing_extensions import Self + +class PodAntiAffinity(BaseModel): + """ + Pod anti affinity is a group of inter pod anti affinity scheduling rules. + """ # noqa: E501 + preferred_during_scheduling_ignored_during_execution: Optional[List[WeightedPodAffinityTerm]] = Field(default=None, description="The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.", alias="preferredDuringSchedulingIgnoredDuringExecution") + required_during_scheduling_ignored_during_execution: Optional[List[PodAffinityTerm]] = Field(default=None, description="If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. 
all terms must be satisfied.", alias="requiredDuringSchedulingIgnoredDuringExecution") + __properties: ClassVar[List[str]] = ["preferredDuringSchedulingIgnoredDuringExecution", "requiredDuringSchedulingIgnoredDuringExecution"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PodAntiAffinity from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in preferred_during_scheduling_ignored_during_execution (list) + _items = [] + if self.preferred_during_scheduling_ignored_during_execution: + for _item in self.preferred_during_scheduling_ignored_during_execution: + if _item: + _items.append(_item.to_dict()) + _dict['preferredDuringSchedulingIgnoredDuringExecution'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in required_during_scheduling_ignored_during_execution (list) + _items = [] + if self.required_during_scheduling_ignored_during_execution: + for _item in self.required_during_scheduling_ignored_during_execution: + if _item: + _items.append(_item.to_dict()) + _dict['requiredDuringSchedulingIgnoredDuringExecution'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PodAntiAffinity from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "preferredDuringSchedulingIgnoredDuringExecution": [WeightedPodAffinityTerm.from_dict(_item) for _item in obj["preferredDuringSchedulingIgnoredDuringExecution"]] if obj.get("preferredDuringSchedulingIgnoredDuringExecution") is not None else None, + "requiredDuringSchedulingIgnoredDuringExecution": [PodAffinityTerm.from_dict(_item) for _item in obj["requiredDuringSchedulingIgnoredDuringExecution"]] if obj.get("requiredDuringSchedulingIgnoredDuringExecution") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/pod_dns_config.py b/sdks/python/client/argo_workflows/models/pod_dns_config.py new file mode 100644 index 000000000000..01d550f7a87a --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/pod_dns_config.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.pod_dns_config_option import PodDNSConfigOption +from typing import Optional, Set +from typing_extensions import Self + +class PodDNSConfig(BaseModel): + """ + PodDNSConfig defines the DNS parameters of a pod in addition to those generated from DNSPolicy. + """ # noqa: E501 + nameservers: Optional[List[StrictStr]] = Field(default=None, description="A list of DNS name server IP addresses. This will be appended to the base nameservers generated from DNSPolicy. Duplicated nameservers will be removed.") + options: Optional[List[PodDNSConfigOption]] = Field(default=None, description="A list of DNS resolver options. This will be merged with the base options generated from DNSPolicy. Duplicated entries will be removed. Resolution options given in Options will override those that appear in the base DNSPolicy.") + searches: Optional[List[StrictStr]] = Field(default=None, description="A list of DNS search domains for host-name lookup. This will be appended to the base search paths generated from DNSPolicy. 
Duplicated search paths will be removed.") + __properties: ClassVar[List[str]] = ["nameservers", "options", "searches"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PodDNSConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in options (list) + _items = [] + if self.options: + for _item in self.options: + if _item: + _items.append(_item.to_dict()) + _dict['options'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PodDNSConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nameservers": obj.get("nameservers"), + "options": [PodDNSConfigOption.from_dict(_item) for _item in obj["options"]] if obj.get("options") is not None else None, + "searches": obj.get("searches") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/pod_dns_config_option.py b/sdks/python/client/argo_workflows/models/pod_dns_config_option.py new file mode 100644 index 000000000000..8cd173998e6f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/pod_dns_config_option.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class PodDNSConfigOption(BaseModel): + """ + PodDNSConfigOption defines DNS resolver options of a pod. 
+ """ # noqa: E501 + name: Optional[StrictStr] = Field(default=None, description="Required.") + value: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PodDNSConfigOption from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PodDNSConfigOption from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/pod_security_context.py b/sdks/python/client/argo_workflows/models/pod_security_context.py new file mode 100644 index 000000000000..0c8860634d6b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/pod_security_context.py @@ -0,0 +1,125 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.se_linux_options import SELinuxOptions +from argo_workflows.models.seccomp_profile import SeccompProfile +from argo_workflows.models.sysctl import Sysctl +from argo_workflows.models.windows_security_context_options import WindowsSecurityContextOptions +from typing import Optional, Set +from typing_extensions import Self + +class PodSecurityContext(BaseModel): + """ + PodSecurityContext holds pod-level security attributes and common container settings. 
Some fields are also present in container.securityContext. Field values of container.securityContext take precedence over field values of PodSecurityContext. + """ # noqa: E501 + fs_group: Optional[StrictInt] = Field(default=None, description="A special supplemental group that applies to all containers in a pod. Some volume types allow the Kubelet to change the ownership of that volume to be owned by the pod: 1. The owning GID will be the FSGroup 2. The setgid bit is set (new files created in the volume will be owned by FSGroup) 3. The permission bits are OR'd with rw-rw---- If unset, the Kubelet will not modify the ownership and permissions of any volume. Note that this field cannot be set when spec.os.name is windows.", alias="fsGroup") + fs_group_change_policy: Optional[StrictStr] = Field(default=None, description="fsGroupChangePolicy defines behavior of changing ownership and permission of the volume before being exposed inside Pod. This field will only apply to volume types which support fsGroup based ownership(and permissions). It will have no effect on ephemeral volume types such as: secret, configmaps and emptydir. Valid values are \"OnRootMismatch\" and \"Always\". If not specified, \"Always\" is used. Note that this field cannot be set when spec.os.name is windows.", alias="fsGroupChangePolicy") + run_as_group: Optional[StrictInt] = Field(default=None, description="The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.", alias="runAsGroup") + run_as_non_root: Optional[StrictBool] = Field(default=None, description="Indicates that the container must run as a non-root user. 
If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.", alias="runAsNonRoot") + run_as_user: Optional[StrictInt] = Field(default=None, description="The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.", alias="runAsUser") + se_linux_options: Optional[SELinuxOptions] = Field(default=None, alias="seLinuxOptions") + seccomp_profile: Optional[SeccompProfile] = Field(default=None, alias="seccompProfile") + supplemental_groups: Optional[List[StrictInt]] = Field(default=None, description="A list of groups applied to the first process run in each container, in addition to the container's primary GID. If unspecified, no groups will be added to any container. Note that this field cannot be set when spec.os.name is windows.", alias="supplementalGroups") + sysctls: Optional[List[Sysctl]] = Field(default=None, description="Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. 
Note that this field cannot be set when spec.os.name is windows.") + windows_options: Optional[WindowsSecurityContextOptions] = Field(default=None, alias="windowsOptions") + __properties: ClassVar[List[str]] = ["fsGroup", "fsGroupChangePolicy", "runAsGroup", "runAsNonRoot", "runAsUser", "seLinuxOptions", "seccompProfile", "supplementalGroups", "sysctls", "windowsOptions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PodSecurityContext from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of se_linux_options + if self.se_linux_options: + _dict['seLinuxOptions'] = self.se_linux_options.to_dict() + # override the default output from pydantic by calling `to_dict()` of seccomp_profile + if self.seccomp_profile: + _dict['seccompProfile'] = self.seccomp_profile.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in sysctls (list) + _items = [] + if self.sysctls: + for _item in self.sysctls: + if _item: + _items.append(_item.to_dict()) + _dict['sysctls'] = _items + # override the default output from pydantic by calling `to_dict()` of windows_options + if self.windows_options: + _dict['windowsOptions'] = self.windows_options.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PodSecurityContext from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsGroup": obj.get("fsGroup"), + "fsGroupChangePolicy": obj.get("fsGroupChangePolicy"), + "runAsGroup": obj.get("runAsGroup"), + "runAsNonRoot": obj.get("runAsNonRoot"), + "runAsUser": obj.get("runAsUser"), + "seLinuxOptions": SELinuxOptions.from_dict(obj["seLinuxOptions"]) if obj.get("seLinuxOptions") is not None else None, + "seccompProfile": SeccompProfile.from_dict(obj["seccompProfile"]) if obj.get("seccompProfile") is not None else None, + "supplementalGroups": obj.get("supplementalGroups"), + "sysctls": [Sysctl.from_dict(_item) for _item in obj["sysctls"]] if obj.get("sysctls") is not None else None, + "windowsOptions": WindowsSecurityContextOptions.from_dict(obj["windowsOptions"]) if obj.get("windowsOptions") is not None else None + }) + return _obj + + diff --git 
a/sdks/python/client/argo_workflows/models/portworx_volume_source.py b/sdks/python/client/argo_workflows/models/portworx_volume_source.py new file mode 100644 index 000000000000..9f91bdb184a9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/portworx_volume_source.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class PortworxVolumeSource(BaseModel): + """ + PortworxVolumeSource represents a Portworx volume resource. + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="FSType represents the filesystem type to mount Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\". Implicitly inferred to be \"ext4\" if unspecified.", alias="fsType") + read_only: Optional[StrictBool] = Field(default=None, description="Defaults to false (read/write). 
ReadOnly here will force the ReadOnly setting in VolumeMounts.", alias="readOnly") + volume_id: StrictStr = Field(description="VolumeID uniquely identifies a Portworx volume", alias="volumeID") + __properties: ClassVar[List[str]] = ["fsType", "readOnly", "volumeID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PortworxVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PortworxVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "readOnly": obj.get("readOnly"), + "volumeID": obj.get("volumeID") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/preferred_scheduling_term.py b/sdks/python/client/argo_workflows/models/preferred_scheduling_term.py new file mode 100644 index 000000000000..4ba7ecdd1b89 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/preferred_scheduling_term.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.node_selector_term import NodeSelectorTerm +from typing import Optional, Set +from typing_extensions import Self + +class PreferredSchedulingTerm(BaseModel): + """ + An empty preferred scheduling term matches all objects with implicit weight 0 (i.e. it's a no-op). A null preferred scheduling term matches no objects (i.e. is also a no-op). 
+ """ # noqa: E501 + preference: NodeSelectorTerm + weight: StrictInt = Field(description="Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100.") + __properties: ClassVar[List[str]] = ["preference", "weight"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PreferredSchedulingTerm from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of preference + if self.preference: + _dict['preference'] = self.preference.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PreferredSchedulingTerm from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "preference": NodeSelectorTerm.from_dict(obj["preference"]) if obj.get("preference") is not None else None, + "weight": obj.get("weight") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/probe.py b/sdks/python/client/argo_workflows/models/probe.py new file mode 100644 index 000000000000..1f83c857d5e6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/probe.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.exec_action import ExecAction +from argo_workflows.models.grpc_action import GRPCAction +from argo_workflows.models.http_get_action import HTTPGetAction +from argo_workflows.models.tcp_socket_action import TCPSocketAction +from typing import Optional, Set +from typing_extensions import Self + +class Probe(BaseModel): + """ + Probe describes a health check to be performed against a container to determine whether it is alive or ready to receive traffic. + """ # noqa: E501 + var_exec: Optional[ExecAction] = Field(default=None, alias="exec") + failure_threshold: Optional[StrictInt] = Field(default=None, description="Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1.", alias="failureThreshold") + grpc: Optional[GRPCAction] = None + http_get: Optional[HTTPGetAction] = Field(default=None, alias="httpGet") + initial_delay_seconds: Optional[StrictInt] = Field(default=None, description="Number of seconds after the container has started before liveness probes are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes", alias="initialDelaySeconds") + period_seconds: Optional[StrictInt] = Field(default=None, description="How often (in seconds) to perform the probe. Default to 10 seconds. Minimum value is 1.", alias="periodSeconds") + success_threshold: Optional[StrictInt] = Field(default=None, description="Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1. Must be 1 for liveness and startup. 
Minimum value is 1.", alias="successThreshold") + tcp_socket: Optional[TCPSocketAction] = Field(default=None, alias="tcpSocket") + termination_grace_period_seconds: Optional[StrictInt] = Field(default=None, description="Optional duration in seconds the pod needs to terminate gracefully upon probe failure. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. If this value is nil, the pod's terminationGracePeriodSeconds will be used. Otherwise, this value overrides the value provided by the pod spec. Value must be non-negative integer. The value zero indicates stop immediately via the kill signal (no opportunity to shut down). This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset.", alias="terminationGracePeriodSeconds") + timeout_seconds: Optional[StrictInt] = Field(default=None, description="Number of seconds after which the probe times out. Defaults to 1 second. Minimum value is 1. 
More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes", alias="timeoutSeconds") + __properties: ClassVar[List[str]] = ["exec", "failureThreshold", "grpc", "httpGet", "initialDelaySeconds", "periodSeconds", "successThreshold", "tcpSocket", "terminationGracePeriodSeconds", "timeoutSeconds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Probe from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of var_exec + if self.var_exec: + _dict['exec'] = self.var_exec.to_dict() + # override the default output from pydantic by calling `to_dict()` of grpc + if self.grpc: + _dict['grpc'] = self.grpc.to_dict() + # override the default output from pydantic by calling `to_dict()` of http_get + if self.http_get: + _dict['httpGet'] = self.http_get.to_dict() + # override the default output from pydantic by calling `to_dict()` of tcp_socket + if self.tcp_socket: + _dict['tcpSocket'] = self.tcp_socket.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Probe from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "exec": ExecAction.from_dict(obj["exec"]) if obj.get("exec") is not None else None, + "failureThreshold": obj.get("failureThreshold"), + "grpc": GRPCAction.from_dict(obj["grpc"]) if obj.get("grpc") is not None else None, + "httpGet": HTTPGetAction.from_dict(obj["httpGet"]) if obj.get("httpGet") is not None else None, + "initialDelaySeconds": obj.get("initialDelaySeconds"), + "periodSeconds": obj.get("periodSeconds"), + "successThreshold": obj.get("successThreshold"), + "tcpSocket": TCPSocketAction.from_dict(obj["tcpSocket"]) if obj.get("tcpSocket") is not None else None, + "terminationGracePeriodSeconds": obj.get("terminationGracePeriodSeconds"), + "timeoutSeconds": obj.get("timeoutSeconds") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/projected_volume_source.py b/sdks/python/client/argo_workflows/models/projected_volume_source.py new file mode 100644 index 000000000000..88d39af5913e --- /dev/null +++ 
b/sdks/python/client/argo_workflows/models/projected_volume_source.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.volume_projection import VolumeProjection +from typing import Optional, Set +from typing_extensions import Self + +class ProjectedVolumeSource(BaseModel): + """ + Represents a projected volume source + """ # noqa: E501 + default_mode: Optional[StrictInt] = Field(default=None, description="Mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. 
This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.", alias="defaultMode") + sources: Optional[List[VolumeProjection]] = Field(default=None, description="list of volume projections") + __properties: ClassVar[List[str]] = ["defaultMode", "sources"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ProjectedVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in sources (list) + _items = [] + if self.sources: + for _item in self.sources: + if _item: + _items.append(_item.to_dict()) + _dict['sources'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ProjectedVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "defaultMode": obj.get("defaultMode"), + "sources": [VolumeProjection.from_dict(_item) for _item in obj["sources"]] if obj.get("sources") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/quobyte_volume_source.py b/sdks/python/client/argo_workflows/models/quobyte_volume_source.py new file mode 100644 index 000000000000..9c54d5a77048 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/quobyte_volume_source.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class QuobyteVolumeSource(BaseModel): + """ + Represents a Quobyte mount that lasts the lifetime of a pod. 
Quobyte volumes do not support ownership management or SELinux relabeling. + """ # noqa: E501 + group: Optional[StrictStr] = Field(default=None, description="Group to map volume access to Default is no group") + read_only: Optional[StrictBool] = Field(default=None, description="ReadOnly here will force the Quobyte volume to be mounted with read-only permissions. Defaults to false.", alias="readOnly") + registry: StrictStr = Field(description="Registry represents a single or multiple Quobyte Registry services specified as a string as host:port pair (multiple entries are separated with commas) which acts as the central registry for volumes") + tenant: Optional[StrictStr] = Field(default=None, description="Tenant owning the given Quobyte volume in the Backend Used with dynamically provisioned Quobyte volumes, value is set by the plugin") + user: Optional[StrictStr] = Field(default=None, description="User to map volume access to Defaults to serivceaccount user") + volume: StrictStr = Field(description="Volume is a string that references an already created Quobyte volume by name.") + __properties: ClassVar[List[str]] = ["group", "readOnly", "registry", "tenant", "user", "volume"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of QuobyteVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of QuobyteVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "group": obj.get("group"), + "readOnly": obj.get("readOnly"), + "registry": obj.get("registry"), + "tenant": obj.get("tenant"), + "user": obj.get("user"), + "volume": obj.get("volume") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/rbd_volume_source.py b/sdks/python/client/argo_workflows/models/rbd_volume_source.py new file mode 100644 index 000000000000..4a76e53da410 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/rbd_volume_source.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class RBDVolumeSource(BaseModel): + """ + Represents a Rados Block Device mount that lasts the lifetime of a pod. RBD volumes support ownership management and SELinux relabeling. + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd", alias="fsType") + image: StrictStr = Field(description="The rados image name. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it") + keyring: Optional[StrictStr] = Field(default=None, description="Keyring is the path to key ring for RBDUser. Default is /etc/ceph/keyring. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it") + monitors: List[StrictStr] = Field(description="A collection of Ceph monitors. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it") + pool: Optional[StrictStr] = Field(default=None, description="The rados pool name. Default is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it") + read_only: Optional[StrictBool] = Field(default=None, description="ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. 
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", alias="readOnly") + secret_ref: Optional[LocalObjectReference] = Field(default=None, alias="secretRef") + user: Optional[StrictStr] = Field(default=None, description="The rados user name. Default is admin. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it") + __properties: ClassVar[List[str]] = ["fsType", "image", "keyring", "monitors", "pool", "readOnly", "secretRef", "user"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RBDVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RBDVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "image": obj.get("image"), + "keyring": obj.get("keyring"), + "monitors": obj.get("monitors"), + "pool": obj.get("pool"), + "readOnly": obj.get("readOnly"), + "secretRef": LocalObjectReference.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None, + "user": obj.get("user") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/resource_field_selector.py b/sdks/python/client/argo_workflows/models/resource_field_selector.py new file mode 100644 index 000000000000..e58b739535f6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/resource_field_selector.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ResourceFieldSelector(BaseModel): + """ + ResourceFieldSelector represents container resources (cpu, memory) and their output format + """ # noqa: E501 + container_name: Optional[StrictStr] = Field(default=None, description="Container name: required for volumes, optional for env vars", alias="containerName") + divisor: Optional[StrictStr] = Field(default=None, description="Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: ::= (Note that may be empty, from the \"\" case in .) ::= 0 | 1 | ... | 9 ::= | ::= | . | . | . ::= \"+\" | \"-\" ::= | ::= | | ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) ::= \"e\" | \"E\" No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. 
The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation.") + resource: StrictStr = Field(description="Required: resource to select") + __properties: ClassVar[List[str]] = ["containerName", "divisor", "resource"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ResourceFieldSelector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ResourceFieldSelector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "containerName": obj.get("containerName"), + "divisor": obj.get("divisor"), + "resource": obj.get("resource") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/resource_requirements.py b/sdks/python/client/argo_workflows/models/resource_requirements.py new file mode 100644 index 000000000000..5862587f474e --- /dev/null +++ b/sdks/python/client/argo_workflows/models/resource_requirements.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ResourceRequirements(BaseModel): + """ + ResourceRequirements describes the compute resource requirements. + """ # noqa: E501 + limits: Optional[Dict[str, StrictStr]] = Field(default=None, description="Limits describes the maximum amount of compute resources allowed. 
More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/") + requests: Optional[Dict[str, StrictStr]] = Field(default=None, description="Requests describes the minimum amount of compute resources required. If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, otherwise to an implementation-defined value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/") + __properties: ClassVar[List[str]] = ["limits", "requests"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ResourceRequirements from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ResourceRequirements from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "limits": obj.get("limits"), + "requests": obj.get("requests") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/scale_io_volume_source.py b/sdks/python/client/argo_workflows/models/scale_io_volume_source.py new file mode 100644 index 000000000000..0f825e313297 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/scale_io_volume_source.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class ScaleIOVolumeSource(BaseModel): + """ + ScaleIOVolumeSource represents a persistent ScaleIO volume + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". 
Default is \"xfs\".", alias="fsType") + gateway: StrictStr = Field(description="The host address of the ScaleIO API Gateway.") + protection_domain: Optional[StrictStr] = Field(default=None, description="The name of the ScaleIO Protection Domain for the configured storage.", alias="protectionDomain") + read_only: Optional[StrictBool] = Field(default=None, description="Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.", alias="readOnly") + secret_ref: LocalObjectReference = Field(alias="secretRef") + ssl_enabled: Optional[StrictBool] = Field(default=None, description="Flag to enable/disable SSL communication with Gateway, default false", alias="sslEnabled") + storage_mode: Optional[StrictStr] = Field(default=None, description="Indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. Default is ThinProvisioned.", alias="storageMode") + storage_pool: Optional[StrictStr] = Field(default=None, description="The ScaleIO Storage Pool associated with the protection domain.", alias="storagePool") + system: StrictStr = Field(description="The name of the storage system as configured in ScaleIO.") + volume_name: Optional[StrictStr] = Field(default=None, description="The name of a volume already created in the ScaleIO system that is associated with this volume source.", alias="volumeName") + __properties: ClassVar[List[str]] = ["fsType", "gateway", "protectionDomain", "readOnly", "secretRef", "sslEnabled", "storageMode", "storagePool", "system", "volumeName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + 
return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ScaleIOVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ScaleIOVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "gateway": obj.get("gateway"), + "protectionDomain": obj.get("protectionDomain"), + "readOnly": obj.get("readOnly"), + "secretRef": LocalObjectReference.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None, + "sslEnabled": obj.get("sslEnabled"), + "storageMode": obj.get("storageMode"), + "storagePool": obj.get("storagePool"), + "system": obj.get("system"), + "volumeName": obj.get("volumeName") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/se_linux_options.py b/sdks/python/client/argo_workflows/models/se_linux_options.py new file mode 100644 index 000000000000..315d816869e1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/se_linux_options.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo 
Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class SELinuxOptions(BaseModel): + """ + SELinuxOptions are the labels to be applied to the container + """ # noqa: E501 + level: Optional[StrictStr] = Field(default=None, description="Level is SELinux level label that applies to the container.") + role: Optional[StrictStr] = Field(default=None, description="Role is a SELinux role label that applies to the container.") + type: Optional[StrictStr] = Field(default=None, description="Type is a SELinux type label that applies to the container.") + user: Optional[StrictStr] = Field(default=None, description="User is a SELinux user label that applies to the container.") + __properties: ClassVar[List[str]] = ["level", "role", "type", "user"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SELinuxOptions from a JSON string""" + return 
cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SELinuxOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "level": obj.get("level"), + "role": obj.get("role"), + "type": obj.get("type"), + "user": obj.get("user") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/seccomp_profile.py b/sdks/python/client/argo_workflows/models/seccomp_profile.py new file mode 100644 index 000000000000..4ba2f7e2f94d --- /dev/null +++ b/sdks/python/client/argo_workflows/models/seccomp_profile.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
class SeccompProfile(BaseModel):
    """SeccompProfile defines a pod/container's seccomp profile settings. Only one profile source may be set."""  # noqa: E501

    localhost_profile: Optional[StrictStr] = Field(default=None, description="localhostProfile indicates a profile defined in a file on the node should be used. The profile must be preconfigured on the node to work. Must be a descending path, relative to the kubelet's configured seccomp profile location. Must only be set if type is \"Localhost\".", alias="localhostProfile")
    type: StrictStr = Field(description="type indicates which kind of seccomp profile will be applied. Valid options are: Localhost - a profile defined in a file on the node should be used. RuntimeDefault - the container runtime default profile should be used. Unconfined - no profile should be applied. Possible enum values: - `\"Localhost\"` indicates a profile defined in a file on the node should be used. The file's location relative to /seccomp. - `\"RuntimeDefault\"` represents the default container runtime seccomp profile. - `\"Unconfined\"` indicates no seccomp profile is applied (A.K.A. unconfined).")
    __properties: ClassVar[List[str]] = ["localhostProfile", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Reject any value outside the allowed seccomp profile kinds."""
        if value not in ('Localhost', 'RuntimeDefault', 'Unconfined'):
            raise ValueError("must be one of enum values ('Localhost', 'RuntimeDefault', 'Unconfined')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model with field aliases applied."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string with field aliases applied."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a SeccompProfile from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain ``self.model_dump(by_alias=True)``, fields whose
        value is ``None`` are omitted from the output.
        """
        excluded_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a SeccompProfile from a dict; non-dict input is validated as-is."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "localhostProfile": obj.get("localhostProfile"),
            "type": obj.get("type")
        })
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + optional: Optional[StrictBool] = Field(default=None, description="Specify whether the Secret must be defined") + __properties: ClassVar[List[str]] = ["name", "optional"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SecretEnvSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SecretEnvSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "optional": obj.get("optional") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/secret_key_selector.py b/sdks/python/client/argo_workflows/models/secret_key_selector.py new file mode 100644 index 000000000000..5873173eb9aa --- /dev/null +++ b/sdks/python/client/argo_workflows/models/secret_key_selector.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class SecretKeySelector(BaseModel): + """ + SecretKeySelector selects a key of a Secret. + """ # noqa: E501 + key: StrictStr = Field(description="The key of the secret to select from. Must be a valid secret key.") + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + optional: Optional[StrictBool] = Field(default=None, description="Specify whether the Secret or its key must be defined") + __properties: ClassVar[List[str]] = ["key", "name", "optional"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SecretKeySelector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SecretKeySelector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "name": obj.get("name"), + "optional": obj.get("optional") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/secret_projection.py b/sdks/python/client/argo_workflows/models/secret_projection.py new file mode 100644 index 000000000000..4e9729ed6873 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/secret_projection.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.key_to_path import KeyToPath +from typing import Optional, Set +from typing_extensions import Self + +class SecretProjection(BaseModel): + """ + Adapts a secret into a projected volume. The contents of the target Secret's Data field will be presented in a projected volume as files using the keys in the Data field as the file names. Note that this is identical to a secret volume source without the default mode. 
+ """ # noqa: E501 + items: Optional[List[KeyToPath]] = Field(default=None, description="If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.") + name: Optional[StrictStr] = Field(default=None, description="Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + optional: Optional[StrictBool] = Field(default=None, description="Specify whether the Secret or its key must be defined") + __properties: ClassVar[List[str]] = ["items", "name", "optional"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SecretProjection from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SecretProjection from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "items": [KeyToPath.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, + "name": obj.get("name"), + "optional": obj.get("optional") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/secret_volume_source.py b/sdks/python/client/argo_workflows/models/secret_volume_source.py new file mode 100644 index 000000000000..4525183a7800 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/secret_volume_source.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
class SecretVolumeSource(BaseModel):
    """Adapts a Secret into a volume. The contents of the target Secret's Data field will be presented in a volume as files using the keys in the Data field as the file names. Secret volumes support ownership management and SELinux relabeling."""  # noqa: E501

    default_mode: Optional[StrictInt] = Field(default=None, description="Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.", alias="defaultMode")
    items: Optional[List[KeyToPath]] = Field(default=None, description="If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.")
    optional: Optional[StrictBool] = Field(default=None, description="Specify whether the Secret or its keys must be defined")
    secret_name: Optional[StrictStr] = Field(default=None, description="Name of the secret in the pod's namespace to use. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret", alias="secretName")
    __properties: ClassVar[List[str]] = ["defaultMode", "items", "optional", "secretName"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model with field aliases applied."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string with field aliases applied."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a SecretVolumeSource from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain ``self.model_dump(by_alias=True)``, fields whose
        value is ``None`` are omitted, and each ``items`` entry is
        serialized via its own ``to_dict()``.
        """
        excluded_fields: Set[str] = set()
        data = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Serialize nested KeyToPath entries through their own to_dict().
        if self.items:
            data['items'] = [entry.to_dict() for entry in self.items if entry]
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a SecretVolumeSource from a dict; non-dict input is validated as-is."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "defaultMode": obj.get("defaultMode"),
            "items": [KeyToPath.from_dict(entry) for entry in obj["items"]] if obj.get("items") is not None else None,
            "optional": obj.get("optional"),
            "secretName": obj.get("secretName")
        })
class SecurityContext(BaseModel):
    """SecurityContext holds security configuration that will be applied to a container. Some fields are present in both SecurityContext and PodSecurityContext. When both are set, the values in SecurityContext take precedence."""  # noqa: E501

    allow_privilege_escalation: Optional[StrictBool] = Field(default=None, description="AllowPrivilegeEscalation controls whether a process can gain more privileges than its parent process. This bool directly controls if the no_new_privs flag will be set on the container process. AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN Note that this field cannot be set when spec.os.name is windows.", alias="allowPrivilegeEscalation")
    capabilities: Optional[Capabilities] = None
    privileged: Optional[StrictBool] = Field(default=None, description="Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. Note that this field cannot be set when spec.os.name is windows.")
    proc_mount: Optional[StrictStr] = Field(default=None, description="procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.", alias="procMount")
    read_only_root_filesystem: Optional[StrictBool] = Field(default=None, description="Whether this container has a read-only root filesystem. Default is false. Note that this field cannot be set when spec.os.name is windows.", alias="readOnlyRootFilesystem")
    run_as_group: Optional[StrictInt] = Field(default=None, description="The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.", alias="runAsGroup")
    run_as_non_root: Optional[StrictBool] = Field(default=None, description="Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.", alias="runAsNonRoot")
    run_as_user: Optional[StrictInt] = Field(default=None, description="The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.", alias="runAsUser")
    se_linux_options: Optional[SELinuxOptions] = Field(default=None, alias="seLinuxOptions")
    seccomp_profile: Optional[SeccompProfile] = Field(default=None, alias="seccompProfile")
    windows_options: Optional[WindowsSecurityContextOptions] = Field(default=None, alias="windowsOptions")
    __properties: ClassVar[List[str]] = ["allowPrivilegeEscalation", "capabilities", "privileged", "procMount", "readOnlyRootFilesystem", "runAsGroup", "runAsNonRoot", "runAsUser", "seLinuxOptions", "seccompProfile", "windowsOptions"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model with field aliases applied."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string with field aliases applied."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a SecurityContext from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain ``self.model_dump(by_alias=True)``, fields whose
        value is ``None`` are omitted, and every nested model is
        serialized via its own ``to_dict()``.
        """
        excluded_fields: Set[str] = set()
        data = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Re-serialize the nested models through their own to_dict().
        nested = (
            ('capabilities', self.capabilities),
            ('seLinuxOptions', self.se_linux_options),
            ('seccompProfile', self.seccomp_profile),
            ('windowsOptions', self.windows_options),
        )
        for key, value in nested:
            if value:
                data[key] = value.to_dict()
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a SecurityContext from a dict; non-dict input is validated as-is."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "allowPrivilegeEscalation": obj.get("allowPrivilegeEscalation"),
            "capabilities": Capabilities.from_dict(obj["capabilities"]) if obj.get("capabilities") is not None else None,
            "privileged": obj.get("privileged"),
            "procMount": obj.get("procMount"),
            "readOnlyRootFilesystem": obj.get("readOnlyRootFilesystem"),
            "runAsGroup": obj.get("runAsGroup"),
            "runAsNonRoot": obj.get("runAsNonRoot"),
            "runAsUser": obj.get("runAsUser"),
            "seLinuxOptions": SELinuxOptions.from_dict(obj["seLinuxOptions"]) if obj.get("seLinuxOptions") is not None else None,
            "seccompProfile": SeccompProfile.from_dict(obj["seccompProfile"]) if obj.get("seccompProfile") is not None else None,
            "windowsOptions": WindowsSecurityContextOptions.from_dict(obj["windowsOptions"]) if obj.get("windowsOptions") is not None else None
        })
class SensorCreateSensorRequest(BaseModel):
    """SensorCreateSensorRequest"""  # noqa: E501

    create_options: Optional[CreateOptions] = Field(default=None, alias="createOptions")
    namespace: Optional[StrictStr] = None
    sensor: Optional[IoArgoprojEventsV1alpha1Sensor] = None
    __properties: ClassVar[List[str]] = ["createOptions", "namespace", "sensor"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model with field aliases applied."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string with field aliases applied."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a SensorCreateSensorRequest from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain ``self.model_dump(by_alias=True)``, fields whose
        value is ``None`` are omitted, and the nested ``create_options``
        and ``sensor`` models are serialized via their own ``to_dict()``.
        """
        excluded_fields: Set[str] = set()
        data = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Re-serialize the nested models through their own to_dict().
        if self.create_options:
            data['createOptions'] = self.create_options.to_dict()
        if self.sensor:
            data['sensor'] = self.sensor.to_dict()
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a SensorCreateSensorRequest from a dict; non-dict input is validated as-is."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "createOptions": CreateOptions.from_dict(obj["createOptions"]) if obj.get("createOptions") is not None else None,
            "namespace": obj.get("namespace"),
            "sensor": IoArgoprojEventsV1alpha1Sensor.from_dict(obj["sensor"]) if obj.get("sensor") is not None else None
        })
class SensorLogEntry(BaseModel):
    """SensorLogEntry"""  # noqa: E501

    dependency_name: Optional[StrictStr] = Field(default=None, alias="dependencyName")
    event_context: Optional[StrictStr] = Field(default=None, alias="eventContext")
    level: Optional[StrictStr] = None
    msg: Optional[StrictStr] = None
    namespace: Optional[StrictStr] = None
    sensor_name: Optional[StrictStr] = Field(default=None, alias="sensorName")
    time: Optional[datetime] = Field(default=None, description="Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.")
    trigger_name: Optional[StrictStr] = Field(default=None, alias="triggerName")
    __properties: ClassVar[List[str]] = ["dependencyName", "eventContext", "level", "msg", "namespace", "sensorName", "time", "triggerName"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model with field aliases applied."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string with field aliases applied."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a SensorLogEntry from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain ``self.model_dump(by_alias=True)``, fields whose
        value is ``None`` are omitted from the output.
        """
        excluded_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a SensorLogEntry from a dict; non-dict input is validated as-is."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "dependencyName": obj.get("dependencyName"),
            "eventContext": obj.get("eventContext"),
            "level": obj.get("level"),
            "msg": obj.get("msg"),
            "namespace": obj.get("namespace"),
            "sensorName": obj.get("sensorName"),
            "time": obj.get("time"),
            "triggerName": obj.get("triggerName")
        })
class SensorSensorWatchEvent(BaseModel):
    """SensorSensorWatchEvent"""  # noqa: E501

    object: Optional[IoArgoprojEventsV1alpha1Sensor] = None
    type: Optional[StrictStr] = None
    __properties: ClassVar[List[str]] = ["object", "type"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print the model with field aliases applied."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize the model to a JSON string with field aliases applied."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a SensorSensorWatchEvent from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the alias-keyed dict form of the model.

        Unlike a plain ``self.model_dump(by_alias=True)``, fields whose
        value is ``None`` are omitted, and the nested ``object`` model is
        serialized via its own ``to_dict()``.
        """
        excluded_fields: Set[str] = set()
        data = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Re-serialize the nested sensor through its own to_dict().
        if self.object:
            data['object'] = self.object.to_dict()
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a SensorSensorWatchEvent from a dict; non-dict input is validated as-is."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "object": IoArgoprojEventsV1alpha1Sensor.from_dict(obj["object"]) if obj.get("object") is not None else None,
            "type": obj.get("type")
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from typing import Optional, Set +from typing_extensions import Self + +class SensorUpdateSensorRequest(BaseModel): + """ + SensorUpdateSensorRequest + """ # noqa: E501 + name: Optional[StrictStr] = None + namespace: Optional[StrictStr] = None + sensor: Optional[IoArgoprojEventsV1alpha1Sensor] = None + __properties: ClassVar[List[str]] = ["name", "namespace", "sensor"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SensorUpdateSensorRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of sensor + if self.sensor: + _dict['sensor'] = self.sensor.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SensorUpdateSensorRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "namespace": obj.get("namespace"), + "sensor": IoArgoprojEventsV1alpha1Sensor.from_dict(obj["sensor"]) if obj.get("sensor") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/service_account_token_projection.py b/sdks/python/client/argo_workflows/models/service_account_token_projection.py new file mode 100644 index 000000000000..3948a5456a34 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/service_account_token_projection.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ServiceAccountTokenProjection(BaseModel): + """ + ServiceAccountTokenProjection represents a projected service account token volume. 
This projection can be used to insert a service account token into the pods runtime filesystem for use against APIs (Kubernetes API Server or otherwise). + """ # noqa: E501 + audience: Optional[StrictStr] = Field(default=None, description="Audience is the intended audience of the token. A recipient of a token must identify itself with an identifier specified in the audience of the token, and otherwise should reject the token. The audience defaults to the identifier of the apiserver.") + expiration_seconds: Optional[StrictInt] = Field(default=None, description="ExpirationSeconds is the requested duration of validity of the service account token. As the token approaches expiration, the kubelet volume plugin will proactively rotate the service account token. The kubelet will start trying to rotate the token if the token is older than 80 percent of its time to live or if the token is older than 24 hours.Defaults to 1 hour and must be at least 10 minutes.", alias="expirationSeconds") + path: StrictStr = Field(description="Path is the path relative to the mount point of the file to project the token into.") + __properties: ClassVar[List[str]] = ["audience", "expirationSeconds", "path"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ServiceAccountTokenProjection from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ServiceAccountTokenProjection from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "audience": obj.get("audience"), + "expirationSeconds": obj.get("expirationSeconds"), + "path": obj.get("path") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/service_port.py b/sdks/python/client/argo_workflows/models/service_port.py new file mode 100644 index 000000000000..f346563afdb5 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/service_port.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ServicePort(BaseModel): + """ + ServicePort contains information on service's port. 
+ """ # noqa: E501 + app_protocol: Optional[StrictStr] = Field(default=None, description="The application protocol for this port. This field follows standard Kubernetes label syntax. Un-prefixed names are reserved for IANA standard service names (as per RFC-6335 and http://www.iana.org/assignments/service-names). Non-standard protocols should use prefixed names such as mycompany.com/my-custom-protocol.", alias="appProtocol") + name: Optional[StrictStr] = Field(default=None, description="The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. When considering the endpoints for a Service, this must match the 'name' field in the EndpointPort. Optional if only one ServicePort is defined on this service.") + node_port: Optional[StrictInt] = Field(default=None, description="The port on each node on which this service is exposed when type is NodePort or LoadBalancer. Usually assigned by the system. If a value is specified, in-range, and not in use it will be used, otherwise the operation will fail. If not specified, a port will be allocated if this Service requires one. If this field is specified when creating a Service which does not need it, creation will fail. This field will be wiped when updating a Service to no longer need it (e.g. changing type from NodePort to ClusterIP). More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport", alias="nodePort") + port: StrictInt = Field(description="The port that will be exposed by this service.") + protocol: Optional[StrictStr] = Field(default=None, description="The IP protocol for this port. Supports \"TCP\", \"UDP\", and \"SCTP\". Default is TCP. Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. 
- `\"UDP\"` is the UDP protocol.") + target_port: Optional[StrictStr] = Field(default=None, alias="targetPort") + __properties: ClassVar[List[str]] = ["appProtocol", "name", "nodePort", "port", "protocol", "targetPort"] + + @field_validator('protocol') + def protocol_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['SCTP', 'TCP', 'UDP']): + raise ValueError("must be one of enum values ('SCTP', 'TCP', 'UDP')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ServicePort from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ServicePort from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "appProtocol": obj.get("appProtocol"), + "name": obj.get("name"), + "nodePort": obj.get("nodePort"), + "port": obj.get("port"), + "protocol": obj.get("protocol"), + "targetPort": obj.get("targetPort") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/status_cause.py b/sdks/python/client/argo_workflows/models/status_cause.py new file mode 100644 index 000000000000..0586677033b3 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/status_cause.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class StatusCause(BaseModel): + """ + StatusCause provides more information about an api.Status failure, including cases when multiple errors are encountered. + """ # noqa: E501 + field: Optional[StrictStr] = Field(default=None, description="The field of the resource that has caused this error, as named by its JSON serialization. 
May include dot and postfix notation for nested attributes. Arrays are zero-indexed. Fields may appear more than once in an array of causes due to fields having multiple errors. Optional. Examples: \"name\" - the field \"name\" on the current resource \"items[0].name\" - the field \"name\" on the first array entry in \"items\"") + message: Optional[StrictStr] = Field(default=None, description="A human-readable description of the cause of the error. This field may be presented as-is to a reader.") + reason: Optional[StrictStr] = Field(default=None, description="A machine-readable description of the cause of the error. If this value is empty there is no information available.") + __properties: ClassVar[List[str]] = ["field", "message", "reason"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatusCause from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatusCause from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "field": obj.get("field"), + "message": obj.get("message"), + "reason": obj.get("reason") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/storage_os_volume_source.py b/sdks/python/client/argo_workflows/models/storage_os_volume_source.py new file mode 100644 index 000000000000..2f7b8f77bdab --- /dev/null +++ b/sdks/python/client/argo_workflows/models/storage_os_volume_source.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.local_object_reference import LocalObjectReference +from typing import Optional, Set +from typing_extensions import Self + +class StorageOSVolumeSource(BaseModel): + """ + Represents a StorageOS persistent volume resource. + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". 
Implicitly inferred to be \"ext4\" if unspecified.", alias="fsType") + read_only: Optional[StrictBool] = Field(default=None, description="Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.", alias="readOnly") + secret_ref: Optional[LocalObjectReference] = Field(default=None, alias="secretRef") + volume_name: Optional[StrictStr] = Field(default=None, description="VolumeName is the human-readable name of the StorageOS volume. Volume names are only unique within a namespace.", alias="volumeName") + volume_namespace: Optional[StrictStr] = Field(default=None, description="VolumeNamespace specifies the scope of the volume within StorageOS. If no namespace is specified then the Pod's namespace will be used. This allows the Kubernetes name scoping to be mirrored within StorageOS for tighter integration. Set VolumeName to any name to override the default behaviour. Set to \"default\" if you are not using namespaces within StorageOS. Namespaces that do not pre-exist within StorageOS will be created.", alias="volumeNamespace") + __properties: ClassVar[List[str]] = ["fsType", "readOnly", "secretRef", "volumeName", "volumeNamespace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StorageOSVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of secret_ref + if self.secret_ref: + _dict['secretRef'] = self.secret_ref.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StorageOSVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "readOnly": obj.get("readOnly"), + "secretRef": LocalObjectReference.from_dict(obj["secretRef"]) if obj.get("secretRef") is not None else None, + "volumeName": obj.get("volumeName"), + "volumeNamespace": obj.get("volumeNamespace") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/stream_result_of_event.py b/sdks/python/client/argo_workflows/models/stream_result_of_event.py new file mode 100644 index 000000000000..b2ff97fbc674 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/stream_result_of_event.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.event import Event +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from typing import Optional, Set +from typing_extensions import Self + +class StreamResultOfEvent(BaseModel): + """ + StreamResultOfEvent + """ # noqa: E501 + error: Optional[GrpcGatewayRuntimeStreamError] = None + result: Optional[Event] = None + __properties: ClassVar[List[str]] = ["error", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamResultOfEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of error + if self.error: + _dict['error'] = self.error.to_dict() + # override the default output from pydantic by calling `to_dict()` of result + if self.result: + _dict['result'] = self.result.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamResultOfEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": GrpcGatewayRuntimeStreamError.from_dict(obj["error"]) if obj.get("error") is not None else None, + "result": Event.from_dict(obj["result"]) if obj.get("result") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/stream_result_of_eventsource_event_source_watch_event.py b/sdks/python/client/argo_workflows/models/stream_result_of_eventsource_event_source_watch_event.py new file mode 100644 index 000000000000..90970a310572 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/stream_result_of_eventsource_event_source_watch_event.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.eventsource_event_source_watch_event import EventsourceEventSourceWatchEvent +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from typing import Optional, Set +from typing_extensions import Self + +class StreamResultOfEventsourceEventSourceWatchEvent(BaseModel): + """ + StreamResultOfEventsourceEventSourceWatchEvent + """ # noqa: E501 + error: Optional[GrpcGatewayRuntimeStreamError] = None + result: Optional[EventsourceEventSourceWatchEvent] = None + __properties: ClassVar[List[str]] = ["error", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamResultOfEventsourceEventSourceWatchEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of error + if self.error: + _dict['error'] = self.error.to_dict() + # override the default output from pydantic by calling `to_dict()` of result + if self.result: + _dict['result'] = self.result.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamResultOfEventsourceEventSourceWatchEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": GrpcGatewayRuntimeStreamError.from_dict(obj["error"]) if obj.get("error") is not None else None, + "result": EventsourceEventSourceWatchEvent.from_dict(obj["result"]) if obj.get("result") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/stream_result_of_eventsource_log_entry.py b/sdks/python/client/argo_workflows/models/stream_result_of_eventsource_log_entry.py new file mode 100644 index 000000000000..011b25ba74a4 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/stream_result_of_eventsource_log_entry.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.eventsource_log_entry import EventsourceLogEntry +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from typing import Optional, Set +from typing_extensions import Self + +class StreamResultOfEventsourceLogEntry(BaseModel): + """ + StreamResultOfEventsourceLogEntry + """ # noqa: E501 + error: Optional[GrpcGatewayRuntimeStreamError] = None + result: Optional[EventsourceLogEntry] = None + __properties: ClassVar[List[str]] = ["error", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamResultOfEventsourceLogEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of error + if self.error: + _dict['error'] = self.error.to_dict() + # override the default output from pydantic by calling `to_dict()` of result + if self.result: + _dict['result'] = self.result.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamResultOfEventsourceLogEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": GrpcGatewayRuntimeStreamError.from_dict(obj["error"]) if obj.get("error") is not None else None, + "result": EventsourceLogEntry.from_dict(obj["result"]) if obj.get("result") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py b/sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py new file mode 100644 index 000000000000..07cf825eba4b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from argo_workflows.models.io_argoproj_workflow_v1alpha1_log_entry import IoArgoprojWorkflowV1alpha1LogEntry +from typing import Optional, Set +from typing_extensions import Self + +class StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry(BaseModel): + """ + StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry + """ # noqa: E501 + error: Optional[GrpcGatewayRuntimeStreamError] = None + result: Optional[IoArgoprojWorkflowV1alpha1LogEntry] = None + __properties: ClassVar[List[str]] = ["error", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of error + if self.error: + _dict['error'] = self.error.to_dict() + # override the default output from pydantic by calling `to_dict()` of result + if self.result: + _dict['result'] = self.result.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": GrpcGatewayRuntimeStreamError.from_dict(obj["error"]) if obj.get("error") is not None else None, + "result": IoArgoprojWorkflowV1alpha1LogEntry.from_dict(obj["result"]) if obj.get("result") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py b/sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py new file mode 100644 index 000000000000..f2dd4fb1975f --- /dev/null +++ b/sdks/python/client/argo_workflows/models/stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_watch_event import IoArgoprojWorkflowV1alpha1WorkflowWatchEvent +from typing import Optional, Set +from typing_extensions import Self + +class StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent(BaseModel): + """ + StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent + """ # noqa: E501 + error: Optional[GrpcGatewayRuntimeStreamError] = None + result: Optional[IoArgoprojWorkflowV1alpha1WorkflowWatchEvent] = None + __properties: ClassVar[List[str]] = ["error", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of error + if self.error: + _dict['error'] = self.error.to_dict() + # override the default output from pydantic by calling `to_dict()` of result + if self.result: + _dict['result'] = self.result.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": GrpcGatewayRuntimeStreamError.from_dict(obj["error"]) if obj.get("error") is not None else None, + "result": IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.from_dict(obj["result"]) if obj.get("result") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/stream_result_of_sensor_log_entry.py b/sdks/python/client/argo_workflows/models/stream_result_of_sensor_log_entry.py new file mode 100644 index 000000000000..909599af9947 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/stream_result_of_sensor_log_entry.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from argo_workflows.models.sensor_log_entry import SensorLogEntry +from typing import Optional, Set +from typing_extensions import Self + +class StreamResultOfSensorLogEntry(BaseModel): + """ + StreamResultOfSensorLogEntry + """ # noqa: E501 + error: Optional[GrpcGatewayRuntimeStreamError] = None + result: Optional[SensorLogEntry] = None + __properties: ClassVar[List[str]] = ["error", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamResultOfSensorLogEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of error + if self.error: + _dict['error'] = self.error.to_dict() + # override the default output from pydantic by calling `to_dict()` of result + if self.result: + _dict['result'] = self.result.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamResultOfSensorLogEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": GrpcGatewayRuntimeStreamError.from_dict(obj["error"]) if obj.get("error") is not None else None, + "result": SensorLogEntry.from_dict(obj["result"]) if obj.get("result") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/stream_result_of_sensor_sensor_watch_event.py b/sdks/python/client/argo_workflows/models/stream_result_of_sensor_sensor_watch_event.py new file mode 100644 index 000000000000..0e79d5d6f5c1 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/stream_result_of_sensor_sensor_watch_event.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError +from argo_workflows.models.sensor_sensor_watch_event import SensorSensorWatchEvent +from typing import Optional, Set +from typing_extensions import Self + +class StreamResultOfSensorSensorWatchEvent(BaseModel): + """ + StreamResultOfSensorSensorWatchEvent + """ # noqa: E501 + error: Optional[GrpcGatewayRuntimeStreamError] = None + result: Optional[SensorSensorWatchEvent] = None + __properties: ClassVar[List[str]] = ["error", "result"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamResultOfSensorSensorWatchEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of error + if self.error: + _dict['error'] = self.error.to_dict() + # override the default output from pydantic by calling `to_dict()` of result + if self.result: + _dict['result'] = self.result.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamResultOfSensorSensorWatchEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": GrpcGatewayRuntimeStreamError.from_dict(obj["error"]) if obj.get("error") is not None else None, + "result": SensorSensorWatchEvent.from_dict(obj["result"]) if obj.get("result") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/sysctl.py b/sdks/python/client/argo_workflows/models/sysctl.py new file mode 100644 index 000000000000..edbc8ea9e342 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/sysctl.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Sysctl(BaseModel): + """ + Sysctl defines a kernel parameter to be set + """ # noqa: E501 + name: StrictStr = Field(description="Name of a property to set") + value: StrictStr = Field(description="Value of a property to set") + __properties: ClassVar[List[str]] = ["name", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Sysctl from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Sysctl from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/tcp_socket_action.py b/sdks/python/client/argo_workflows/models/tcp_socket_action.py new file mode 100644 index 000000000000..8db731027533 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/tcp_socket_action.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class TCPSocketAction(BaseModel): + """ + TCPSocketAction describes an action based on opening a socket + """ # noqa: E501 + host: Optional[StrictStr] = Field(default=None, description="Optional: Host name to connect to, defaults to the pod IP.") + port: StrictStr + __properties: ClassVar[List[str]] = ["host", "port"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of TCPSocketAction from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of TCPSocketAction from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "host": obj.get("host"), + "port": obj.get("port") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/toleration.py b/sdks/python/client/argo_workflows/models/toleration.py new file mode 100644 index 000000000000..e1e13119b4d0 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/toleration.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Toleration(BaseModel): + """ + The pod this Toleration is attached to tolerates any taint that matches the triple using the matching operator . + """ # noqa: E501 + effect: Optional[StrictStr] = Field(default=None, description="Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. 
Possible enum values: - `\"NoExecute\"` Evict any already-running pods that do not tolerate the taint. Currently enforced by NodeController. - `\"NoSchedule\"` Do not allow new pods to schedule onto the node unless they tolerate the taint, but allow all pods submitted to Kubelet without going through the scheduler to start, and allow all already-running pods to continue running. Enforced by the scheduler. - `\"PreferNoSchedule\"` Like TaintEffectNoSchedule, but the scheduler tries not to schedule new pods onto the node, rather than prohibiting new pods from scheduling onto the node entirely. Enforced by the scheduler.") + key: Optional[StrictStr] = Field(default=None, description="Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys.") + operator: Optional[StrictStr] = Field(default=None, description="Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. Possible enum values: - `\"Equal\"` - `\"Exists\"`") + toleration_seconds: Optional[StrictInt] = Field(default=None, description="TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system.", alias="tolerationSeconds") + value: Optional[StrictStr] = Field(default=None, description="Value is the taint value the toleration matches to. 
If the operator is Exists, the value should be empty, otherwise just a regular string.") + __properties: ClassVar[List[str]] = ["effect", "key", "operator", "tolerationSeconds", "value"] + + @field_validator('effect') + def effect_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['NoExecute', 'NoSchedule', 'PreferNoSchedule']): + raise ValueError("must be one of enum values ('NoExecute', 'NoSchedule', 'PreferNoSchedule')") + return value + + @field_validator('operator') + def operator_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Equal', 'Exists']): + raise ValueError("must be one of enum values ('Equal', 'Exists')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Toleration from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Toleration from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "effect": obj.get("effect"), + "key": obj.get("key"), + "operator": obj.get("operator"), + "tolerationSeconds": obj.get("tolerationSeconds"), + "value": obj.get("value") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/typed_local_object_reference.py b/sdks/python/client/argo_workflows/models/typed_local_object_reference.py new file mode 100644 index 000000000000..63884bdddd96 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/typed_local_object_reference.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class TypedLocalObjectReference(BaseModel): + """ + TypedLocalObjectReference contains enough information to let you locate the typed referenced object inside the same namespace. + """ # noqa: E501 + api_group: Optional[StrictStr] = Field(default=None, description="APIGroup is the group for the resource being referenced. 
If APIGroup is not specified, the specified Kind must be in the core API group. For any other third-party types, APIGroup is required.", alias="apiGroup") + kind: StrictStr = Field(description="Kind is the type of resource being referenced") + name: StrictStr = Field(description="Name is the name of resource being referenced") + __properties: ClassVar[List[str]] = ["apiGroup", "kind", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of TypedLocalObjectReference from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of TypedLocalObjectReference from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "apiGroup": obj.get("apiGroup"), + "kind": obj.get("kind"), + "name": obj.get("name") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/volume.py b/sdks/python/client/argo_workflows/models/volume.py new file mode 100644 index 000000000000..e9c36346eaf2 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/volume.py @@ -0,0 +1,261 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.aws_elastic_block_store_volume_source import AWSElasticBlockStoreVolumeSource +from argo_workflows.models.azure_disk_volume_source import AzureDiskVolumeSource +from argo_workflows.models.azure_file_volume_source import AzureFileVolumeSource +from argo_workflows.models.ceph_fs_volume_source import CephFSVolumeSource +from argo_workflows.models.cinder_volume_source import CinderVolumeSource +from argo_workflows.models.config_map_volume_source import ConfigMapVolumeSource +from argo_workflows.models.csi_volume_source import CSIVolumeSource +from argo_workflows.models.downward_api_volume_source import DownwardAPIVolumeSource +from argo_workflows.models.empty_dir_volume_source import EmptyDirVolumeSource +from argo_workflows.models.ephemeral_volume_source import EphemeralVolumeSource +from argo_workflows.models.fc_volume_source import FCVolumeSource +from argo_workflows.models.flex_volume_source import FlexVolumeSource +from argo_workflows.models.flocker_volume_source import FlockerVolumeSource +from argo_workflows.models.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource +from argo_workflows.models.git_repo_volume_source import GitRepoVolumeSource +from argo_workflows.models.glusterfs_volume_source import GlusterfsVolumeSource +from argo_workflows.models.host_path_volume_source import HostPathVolumeSource +from argo_workflows.models.iscsi_volume_source import ISCSIVolumeSource +from argo_workflows.models.nfs_volume_source import NFSVolumeSource +from argo_workflows.models.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource +from argo_workflows.models.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource +from 
argo_workflows.models.portworx_volume_source import PortworxVolumeSource +from argo_workflows.models.projected_volume_source import ProjectedVolumeSource +from argo_workflows.models.quobyte_volume_source import QuobyteVolumeSource +from argo_workflows.models.rbd_volume_source import RBDVolumeSource +from argo_workflows.models.scale_io_volume_source import ScaleIOVolumeSource +from argo_workflows.models.secret_volume_source import SecretVolumeSource +from argo_workflows.models.storage_os_volume_source import StorageOSVolumeSource +from argo_workflows.models.vsphere_virtual_disk_volume_source import VsphereVirtualDiskVolumeSource +from typing import Optional, Set +from typing_extensions import Self + +class Volume(BaseModel): + """ + Volume represents a named volume in a pod that may be accessed by any container in the pod. + """ # noqa: E501 + aws_elastic_block_store: Optional[AWSElasticBlockStoreVolumeSource] = Field(default=None, alias="awsElasticBlockStore") + azure_disk: Optional[AzureDiskVolumeSource] = Field(default=None, alias="azureDisk") + azure_file: Optional[AzureFileVolumeSource] = Field(default=None, alias="azureFile") + cephfs: Optional[CephFSVolumeSource] = None + cinder: Optional[CinderVolumeSource] = None + config_map: Optional[ConfigMapVolumeSource] = Field(default=None, alias="configMap") + csi: Optional[CSIVolumeSource] = None + downward_api: Optional[DownwardAPIVolumeSource] = Field(default=None, alias="downwardAPI") + empty_dir: Optional[EmptyDirVolumeSource] = Field(default=None, alias="emptyDir") + ephemeral: Optional[EphemeralVolumeSource] = None + fc: Optional[FCVolumeSource] = None + flex_volume: Optional[FlexVolumeSource] = Field(default=None, alias="flexVolume") + flocker: Optional[FlockerVolumeSource] = None + gce_persistent_disk: Optional[GCEPersistentDiskVolumeSource] = Field(default=None, alias="gcePersistentDisk") + git_repo: Optional[GitRepoVolumeSource] = Field(default=None, alias="gitRepo") + glusterfs: 
Optional[GlusterfsVolumeSource] = None + host_path: Optional[HostPathVolumeSource] = Field(default=None, alias="hostPath") + iscsi: Optional[ISCSIVolumeSource] = None + name: StrictStr = Field(description="Volume's name. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names") + nfs: Optional[NFSVolumeSource] = None + persistent_volume_claim: Optional[PersistentVolumeClaimVolumeSource] = Field(default=None, alias="persistentVolumeClaim") + photon_persistent_disk: Optional[PhotonPersistentDiskVolumeSource] = Field(default=None, alias="photonPersistentDisk") + portworx_volume: Optional[PortworxVolumeSource] = Field(default=None, alias="portworxVolume") + projected: Optional[ProjectedVolumeSource] = None + quobyte: Optional[QuobyteVolumeSource] = None + rbd: Optional[RBDVolumeSource] = None + scale_io: Optional[ScaleIOVolumeSource] = Field(default=None, alias="scaleIO") + secret: Optional[SecretVolumeSource] = None + storageos: Optional[StorageOSVolumeSource] = None + vsphere_volume: Optional[VsphereVirtualDiskVolumeSource] = Field(default=None, alias="vsphereVolume") + __properties: ClassVar[List[str]] = ["awsElasticBlockStore", "azureDisk", "azureFile", "cephfs", "cinder", "configMap", "csi", "downwardAPI", "emptyDir", "ephemeral", "fc", "flexVolume", "flocker", "gcePersistentDisk", "gitRepo", "glusterfs", "hostPath", "iscsi", "name", "nfs", "persistentVolumeClaim", "photonPersistentDisk", "portworxVolume", "projected", "quobyte", "rbd", "scaleIO", "secret", "storageos", "vsphereVolume"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use 
.model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Volume from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of aws_elastic_block_store + if self.aws_elastic_block_store: + _dict['awsElasticBlockStore'] = self.aws_elastic_block_store.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure_disk + if self.azure_disk: + _dict['azureDisk'] = self.azure_disk.to_dict() + # override the default output from pydantic by calling `to_dict()` of azure_file + if self.azure_file: + _dict['azureFile'] = self.azure_file.to_dict() + # override the default output from pydantic by calling `to_dict()` of cephfs + if self.cephfs: + _dict['cephfs'] = self.cephfs.to_dict() + # override the default output from pydantic by calling `to_dict()` of cinder + if self.cinder: + _dict['cinder'] = self.cinder.to_dict() + # override the default output from pydantic by calling `to_dict()` of config_map + if self.config_map: + _dict['configMap'] = self.config_map.to_dict() + # override the default output from pydantic by calling `to_dict()` of csi + if self.csi: + _dict['csi'] = self.csi.to_dict() + # override the default output from pydantic by calling `to_dict()` of downward_api + if self.downward_api: + _dict['downwardAPI'] = 
self.downward_api.to_dict() + # override the default output from pydantic by calling `to_dict()` of empty_dir + if self.empty_dir: + _dict['emptyDir'] = self.empty_dir.to_dict() + # override the default output from pydantic by calling `to_dict()` of ephemeral + if self.ephemeral: + _dict['ephemeral'] = self.ephemeral.to_dict() + # override the default output from pydantic by calling `to_dict()` of fc + if self.fc: + _dict['fc'] = self.fc.to_dict() + # override the default output from pydantic by calling `to_dict()` of flex_volume + if self.flex_volume: + _dict['flexVolume'] = self.flex_volume.to_dict() + # override the default output from pydantic by calling `to_dict()` of flocker + if self.flocker: + _dict['flocker'] = self.flocker.to_dict() + # override the default output from pydantic by calling `to_dict()` of gce_persistent_disk + if self.gce_persistent_disk: + _dict['gcePersistentDisk'] = self.gce_persistent_disk.to_dict() + # override the default output from pydantic by calling `to_dict()` of git_repo + if self.git_repo: + _dict['gitRepo'] = self.git_repo.to_dict() + # override the default output from pydantic by calling `to_dict()` of glusterfs + if self.glusterfs: + _dict['glusterfs'] = self.glusterfs.to_dict() + # override the default output from pydantic by calling `to_dict()` of host_path + if self.host_path: + _dict['hostPath'] = self.host_path.to_dict() + # override the default output from pydantic by calling `to_dict()` of iscsi + if self.iscsi: + _dict['iscsi'] = self.iscsi.to_dict() + # override the default output from pydantic by calling `to_dict()` of nfs + if self.nfs: + _dict['nfs'] = self.nfs.to_dict() + # override the default output from pydantic by calling `to_dict()` of persistent_volume_claim + if self.persistent_volume_claim: + _dict['persistentVolumeClaim'] = self.persistent_volume_claim.to_dict() + # override the default output from pydantic by calling `to_dict()` of photon_persistent_disk + if self.photon_persistent_disk: + 
_dict['photonPersistentDisk'] = self.photon_persistent_disk.to_dict() + # override the default output from pydantic by calling `to_dict()` of portworx_volume + if self.portworx_volume: + _dict['portworxVolume'] = self.portworx_volume.to_dict() + # override the default output from pydantic by calling `to_dict()` of projected + if self.projected: + _dict['projected'] = self.projected.to_dict() + # override the default output from pydantic by calling `to_dict()` of quobyte + if self.quobyte: + _dict['quobyte'] = self.quobyte.to_dict() + # override the default output from pydantic by calling `to_dict()` of rbd + if self.rbd: + _dict['rbd'] = self.rbd.to_dict() + # override the default output from pydantic by calling `to_dict()` of scale_io + if self.scale_io: + _dict['scaleIO'] = self.scale_io.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret + if self.secret: + _dict['secret'] = self.secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of storageos + if self.storageos: + _dict['storageos'] = self.storageos.to_dict() + # override the default output from pydantic by calling `to_dict()` of vsphere_volume + if self.vsphere_volume: + _dict['vsphereVolume'] = self.vsphere_volume.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Volume from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "awsElasticBlockStore": AWSElasticBlockStoreVolumeSource.from_dict(obj["awsElasticBlockStore"]) if obj.get("awsElasticBlockStore") is not None else None, + "azureDisk": AzureDiskVolumeSource.from_dict(obj["azureDisk"]) if obj.get("azureDisk") is not None else None, + "azureFile": AzureFileVolumeSource.from_dict(obj["azureFile"]) if obj.get("azureFile") is not None else None, + "cephfs": CephFSVolumeSource.from_dict(obj["cephfs"]) if 
obj.get("cephfs") is not None else None, + "cinder": CinderVolumeSource.from_dict(obj["cinder"]) if obj.get("cinder") is not None else None, + "configMap": ConfigMapVolumeSource.from_dict(obj["configMap"]) if obj.get("configMap") is not None else None, + "csi": CSIVolumeSource.from_dict(obj["csi"]) if obj.get("csi") is not None else None, + "downwardAPI": DownwardAPIVolumeSource.from_dict(obj["downwardAPI"]) if obj.get("downwardAPI") is not None else None, + "emptyDir": EmptyDirVolumeSource.from_dict(obj["emptyDir"]) if obj.get("emptyDir") is not None else None, + "ephemeral": EphemeralVolumeSource.from_dict(obj["ephemeral"]) if obj.get("ephemeral") is not None else None, + "fc": FCVolumeSource.from_dict(obj["fc"]) if obj.get("fc") is not None else None, + "flexVolume": FlexVolumeSource.from_dict(obj["flexVolume"]) if obj.get("flexVolume") is not None else None, + "flocker": FlockerVolumeSource.from_dict(obj["flocker"]) if obj.get("flocker") is not None else None, + "gcePersistentDisk": GCEPersistentDiskVolumeSource.from_dict(obj["gcePersistentDisk"]) if obj.get("gcePersistentDisk") is not None else None, + "gitRepo": GitRepoVolumeSource.from_dict(obj["gitRepo"]) if obj.get("gitRepo") is not None else None, + "glusterfs": GlusterfsVolumeSource.from_dict(obj["glusterfs"]) if obj.get("glusterfs") is not None else None, + "hostPath": HostPathVolumeSource.from_dict(obj["hostPath"]) if obj.get("hostPath") is not None else None, + "iscsi": ISCSIVolumeSource.from_dict(obj["iscsi"]) if obj.get("iscsi") is not None else None, + "name": obj.get("name"), + "nfs": NFSVolumeSource.from_dict(obj["nfs"]) if obj.get("nfs") is not None else None, + "persistentVolumeClaim": PersistentVolumeClaimVolumeSource.from_dict(obj["persistentVolumeClaim"]) if obj.get("persistentVolumeClaim") is not None else None, + "photonPersistentDisk": PhotonPersistentDiskVolumeSource.from_dict(obj["photonPersistentDisk"]) if obj.get("photonPersistentDisk") is not None else None, + "portworxVolume": 
PortworxVolumeSource.from_dict(obj["portworxVolume"]) if obj.get("portworxVolume") is not None else None, + "projected": ProjectedVolumeSource.from_dict(obj["projected"]) if obj.get("projected") is not None else None, + "quobyte": QuobyteVolumeSource.from_dict(obj["quobyte"]) if obj.get("quobyte") is not None else None, + "rbd": RBDVolumeSource.from_dict(obj["rbd"]) if obj.get("rbd") is not None else None, + "scaleIO": ScaleIOVolumeSource.from_dict(obj["scaleIO"]) if obj.get("scaleIO") is not None else None, + "secret": SecretVolumeSource.from_dict(obj["secret"]) if obj.get("secret") is not None else None, + "storageos": StorageOSVolumeSource.from_dict(obj["storageos"]) if obj.get("storageos") is not None else None, + "vsphereVolume": VsphereVirtualDiskVolumeSource.from_dict(obj["vsphereVolume"]) if obj.get("vsphereVolume") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/volume_device.py b/sdks/python/client/argo_workflows/models/volume_device.py new file mode 100644 index 000000000000..278541ad52f9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/volume_device.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class VolumeDevice(BaseModel): + """ + volumeDevice describes a mapping of a raw block device within a container. 
+ """ # noqa: E501 + device_path: StrictStr = Field(description="devicePath is the path inside of the container that the device will be mapped to.", alias="devicePath") + name: StrictStr = Field(description="name must match the name of a persistentVolumeClaim in the pod") + __properties: ClassVar[List[str]] = ["devicePath", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VolumeDevice from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VolumeDevice from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "devicePath": obj.get("devicePath"), + "name": obj.get("name") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/volume_mount.py b/sdks/python/client/argo_workflows/models/volume_mount.py new file mode 100644 index 000000000000..e16b48331e8b --- /dev/null +++ b/sdks/python/client/argo_workflows/models/volume_mount.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class VolumeMount(BaseModel): + """ + VolumeMount describes a mounting of a Volume within a container. + """ # noqa: E501 + mount_path: StrictStr = Field(description="Path within the container at which the volume should be mounted. Must not contain ':'.", alias="mountPath") + mount_propagation: Optional[StrictStr] = Field(default=None, description="mountPropagation determines how mounts are propagated from the host to container and the other way around. 
When not set, MountPropagationNone is used. This field is beta in 1.10.", alias="mountPropagation") + name: StrictStr = Field(description="This must match the Name of a Volume.") + read_only: Optional[StrictBool] = Field(default=None, description="Mounted read-only if true, read-write otherwise (false or unspecified). Defaults to false.", alias="readOnly") + sub_path: Optional[StrictStr] = Field(default=None, description="Path within the volume from which the container's volume should be mounted. Defaults to \"\" (volume's root).", alias="subPath") + sub_path_expr: Optional[StrictStr] = Field(default=None, description="Expanded path within the volume from which the container's volume should be mounted. Behaves similarly to SubPath but environment variable references $(VAR_NAME) are expanded using the container's environment. Defaults to \"\" (volume's root). SubPathExpr and SubPath are mutually exclusive.", alias="subPathExpr") + __properties: ClassVar[List[str]] = ["mountPath", "mountPropagation", "name", "readOnly", "subPath", "subPathExpr"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VolumeMount from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VolumeMount from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "mountPath": obj.get("mountPath"), + "mountPropagation": obj.get("mountPropagation"), + "name": obj.get("name"), + "readOnly": obj.get("readOnly"), + "subPath": obj.get("subPath"), + "subPathExpr": obj.get("subPathExpr") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/volume_projection.py b/sdks/python/client/argo_workflows/models/volume_projection.py new file mode 100644 index 000000000000..2c40f2faa9d6 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/volume_projection.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from argo_workflows.models.config_map_projection import ConfigMapProjection +from argo_workflows.models.downward_api_projection import DownwardAPIProjection +from argo_workflows.models.secret_projection import SecretProjection +from argo_workflows.models.service_account_token_projection import ServiceAccountTokenProjection +from typing import Optional, Set +from typing_extensions import Self + +class VolumeProjection(BaseModel): + """ + Projection that may be projected along with other supported volume types + """ # noqa: E501 + config_map: Optional[ConfigMapProjection] = Field(default=None, alias="configMap") + downward_api: Optional[DownwardAPIProjection] = Field(default=None, alias="downwardAPI") + secret: Optional[SecretProjection] = None + service_account_token: Optional[ServiceAccountTokenProjection] = Field(default=None, alias="serviceAccountToken") + __properties: ClassVar[List[str]] = ["configMap", "downwardAPI", "secret", "serviceAccountToken"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VolumeProjection from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config_map + if self.config_map: + _dict['configMap'] = self.config_map.to_dict() + # override the default output from pydantic by calling `to_dict()` of downward_api + if self.downward_api: + _dict['downwardAPI'] = self.downward_api.to_dict() + # override the default output from pydantic by calling `to_dict()` of secret + if self.secret: + _dict['secret'] = self.secret.to_dict() + # override the default output from pydantic by calling `to_dict()` of service_account_token + if self.service_account_token: + _dict['serviceAccountToken'] = self.service_account_token.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VolumeProjection from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMap": ConfigMapProjection.from_dict(obj["configMap"]) if obj.get("configMap") is not None else None, + "downwardAPI": DownwardAPIProjection.from_dict(obj["downwardAPI"]) if obj.get("downwardAPI") is not None else None, + "secret": SecretProjection.from_dict(obj["secret"]) if obj.get("secret") is not None else None, + "serviceAccountToken": ServiceAccountTokenProjection.from_dict(obj["serviceAccountToken"]) if obj.get("serviceAccountToken") is not None else None + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/vsphere_virtual_disk_volume_source.py 
b/sdks/python/client/argo_workflows/models/vsphere_virtual_disk_volume_source.py new file mode 100644 index 000000000000..6d0b7c098cd9 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/vsphere_virtual_disk_volume_source.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class VsphereVirtualDiskVolumeSource(BaseModel): + """ + Represents a vSphere volume resource. + """ # noqa: E501 + fs_type: Optional[StrictStr] = Field(default=None, description="Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". 
Implicitly inferred to be \"ext4\" if unspecified.", alias="fsType") + storage_policy_id: Optional[StrictStr] = Field(default=None, description="Storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName.", alias="storagePolicyID") + storage_policy_name: Optional[StrictStr] = Field(default=None, description="Storage Policy Based Management (SPBM) profile name.", alias="storagePolicyName") + volume_path: StrictStr = Field(description="Path that identifies vSphere volume vmdk", alias="volumePath") + __properties: ClassVar[List[str]] = ["fsType", "storagePolicyID", "storagePolicyName", "volumePath"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VsphereVirtualDiskVolumeSource from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VsphereVirtualDiskVolumeSource from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fsType": obj.get("fsType"), + "storagePolicyID": obj.get("storagePolicyID"), + "storagePolicyName": obj.get("storagePolicyName"), + "volumePath": obj.get("volumePath") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/weighted_pod_affinity_term.py b/sdks/python/client/argo_workflows/models/weighted_pod_affinity_term.py new file mode 100644 index 000000000000..a16aa3bbfc3c --- /dev/null +++ b/sdks/python/client/argo_workflows/models/weighted_pod_affinity_term.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List +from argo_workflows.models.pod_affinity_term import PodAffinityTerm +from typing import Optional, Set +from typing_extensions import Self + +class WeightedPodAffinityTerm(BaseModel): + """ + The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) + """ # noqa: E501 + pod_affinity_term: PodAffinityTerm = Field(alias="podAffinityTerm") + weight: StrictInt = Field(description="weight associated with matching the corresponding podAffinityTerm, in the range 1-100.") + __properties: ClassVar[List[str]] = ["podAffinityTerm", "weight"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WeightedPodAffinityTerm from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pod_affinity_term + if self.pod_affinity_term: + _dict['podAffinityTerm'] = self.pod_affinity_term.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WeightedPodAffinityTerm from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "podAffinityTerm": PodAffinityTerm.from_dict(obj["podAffinityTerm"]) if obj.get("podAffinityTerm") is not None else None, + "weight": obj.get("weight") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/models/windows_security_context_options.py b/sdks/python/client/argo_workflows/models/windows_security_context_options.py new file mode 100644 index 000000000000..32738ecd9189 --- /dev/null +++ b/sdks/python/client/argo_workflows/models/windows_security_context_options.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + + The version of the OpenAPI document: VERSION + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class WindowsSecurityContextOptions(BaseModel): + """ + WindowsSecurityContextOptions contain Windows-specific options and credentials. 
+ """ # noqa: E501 + gmsa_credential_spec: Optional[StrictStr] = Field(default=None, description="GMSACredentialSpec is where the GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) inlines the contents of the GMSA credential spec named by the GMSACredentialSpecName field.", alias="gmsaCredentialSpec") + gmsa_credential_spec_name: Optional[StrictStr] = Field(default=None, description="GMSACredentialSpecName is the name of the GMSA credential spec to use.", alias="gmsaCredentialSpecName") + host_process: Optional[StrictBool] = Field(default=None, description="HostProcess determines if a container should be run as a 'Host Process' container. This field is alpha-level and will only be honored by components that enable the WindowsHostProcessContainers feature flag. Setting this field without the feature flag will result in errors when validating the Pod. All of a Pod's containers must have the same effective HostProcess value (it is not allowed to have a mix of HostProcess containers and non-HostProcess containers). In addition, if HostProcess is true then HostNetwork must also be set to true.", alias="hostProcess") + run_as_user_name: Optional[StrictStr] = Field(default=None, description="The UserName in Windows to run the entrypoint of the container process. Defaults to the user specified in image metadata if unspecified. May also be set in PodSecurityContext. 
If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.", alias="runAsUserName") + __properties: ClassVar[List[str]] = ["gmsaCredentialSpec", "gmsaCredentialSpecName", "hostProcess", "runAsUserName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WindowsSecurityContextOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WindowsSecurityContextOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "gmsaCredentialSpec": obj.get("gmsaCredentialSpec"), + "gmsaCredentialSpecName": obj.get("gmsaCredentialSpecName"), + "hostProcess": obj.get("hostProcess"), + "runAsUserName": obj.get("runAsUserName") + }) + return _obj + + diff --git a/sdks/python/client/argo_workflows/py.typed b/sdks/python/client/argo_workflows/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/python/client/argo_workflows/rest.py b/sdks/python/client/argo_workflows/rest.py index 687e870b85b5..147b2a3021cf 100644 --- a/sdks/python/client/argo_workflows/rest.py +++ b/sdks/python/client/argo_workflows/rest.py @@ -1,54 +1,68 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 import io import json -import logging import re import ssl -from urllib.parse import urlencode -from urllib.parse import urlparse -from urllib.request import proxy_bypass_environment + import urllib3 -import ipaddress -from argo_workflows.exceptions import ApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, ApiValueError +from argo_workflows.exceptions import ApiException, ApiValueError +SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"} +RESTResponseType = urllib3.HTTPResponse -logger = logging.getLogger(__name__) + +def is_socks_proxy_url(url): + if url is None: + return False + split_section = url.split("://") + if len(split_section) < 2: + return False + else: + return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES class RESTResponse(io.IOBase): - def __init__(self, resp): - self.urllib3_response = resp + def __init__(self, resp) -> None: + self.response = resp self.status = resp.status self.reason = resp.reason - self.data = resp.data + self.data = None + + def read(self): + if self.data is None: + self.data = self.response.data + return self.data def getheaders(self): """Returns a dictionary of the response headers.""" - return self.urllib3_response.getheaders() + return self.response.headers def getheader(self, name, default=None): """Returns a given response header.""" - return self.urllib3_response.getheader(name, default) + return self.response.headers.get(name, default) -class RESTClientObject(object): +class RESTClientObject: - def __init__(self, configuration, pools_size=4, maxsize=None): + def __init__(self, configuration) -> None: # urllib3.PoolManager will pass all kw parameters to connectionpool # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 - # maxsize is the number of 
requests to host that are allowed in parallel # noqa: E501 # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 # cert_reqs @@ -57,70 +71,79 @@ def __init__(self, configuration, pools_size=4, maxsize=None): else: cert_reqs = ssl.CERT_NONE - addition_pool_args = {} + pool_args = { + "cert_reqs": cert_reqs, + "ca_certs": configuration.ssl_ca_cert, + "cert_file": configuration.cert_file, + "key_file": configuration.key_file, + } if configuration.assert_hostname is not None: - addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + pool_args['assert_hostname'] = ( + configuration.assert_hostname + ) if configuration.retries is not None: - addition_pool_args['retries'] = configuration.retries + pool_args['retries'] = configuration.retries + + if configuration.tls_server_name: + pool_args['server_hostname'] = configuration.tls_server_name + if configuration.socket_options is not None: - addition_pool_args['socket_options'] = configuration.socket_options + pool_args['socket_options'] = configuration.socket_options - if maxsize is None: - if configuration.connection_pool_maxsize is not None: - maxsize = configuration.connection_pool_maxsize - else: - maxsize = 4 + if configuration.connection_pool_maxsize is not None: + pool_args['maxsize'] = configuration.connection_pool_maxsize # https pool manager - if configuration.proxy and not should_bypass_proxies(configuration.host, no_proxy=configuration.no_proxy or ''): - self.pool_manager = urllib3.ProxyManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=configuration.ssl_ca_cert, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - proxy_url=configuration.proxy, - proxy_headers=configuration.proxy_headers, - **addition_pool_args - ) + self.pool_manager: urllib3.PoolManager + + if configuration.proxy: + if is_socks_proxy_url(configuration.proxy): + from urllib3.contrib.socks 
import SOCKSProxyManager + pool_args["proxy_url"] = configuration.proxy + pool_args["headers"] = configuration.proxy_headers + self.pool_manager = SOCKSProxyManager(**pool_args) + else: + pool_args["proxy_url"] = configuration.proxy + pool_args["proxy_headers"] = configuration.proxy_headers + self.pool_manager = urllib3.ProxyManager(**pool_args) else: - self.pool_manager = urllib3.PoolManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=configuration.ssl_ca_cert, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - **addition_pool_args - ) - - def request(self, method, url, query_params=None, headers=None, - body=None, post_params=None, _preload_content=True, - _request_timeout=None): + self.pool_manager = urllib3.PoolManager(**pool_args) + + def request( + self, + method, + url, + headers=None, + body=None, + post_params=None, + _request_timeout=None + ): """Perform requests. :param method: http request method :param url: http request url - :param query_params: query parameters in the url :param headers: http request headers :param body: request json body, for `application/json` :param post_params: request post parameters, `application/x-www-form-urlencoded` and `multipart/form-data` - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
""" method = method.upper() - assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', - 'PATCH', 'OPTIONS'] + assert method in [ + 'GET', + 'HEAD', + 'DELETE', + 'POST', + 'PUT', + 'PATCH', + 'OPTIONS' + ] if post_params and body: raise ApiValueError( @@ -132,60 +155,81 @@ def request(self, method, url, query_params=None, headers=None, timeout = None if _request_timeout: - if isinstance(_request_timeout, (int, float)): # noqa: E501,F821 + if isinstance(_request_timeout, (int, float)): timeout = urllib3.Timeout(total=_request_timeout) - elif (isinstance(_request_timeout, tuple) and - len(_request_timeout) == 2): + elif ( + isinstance(_request_timeout, tuple) + and len(_request_timeout) == 2 + ): timeout = urllib3.Timeout( - connect=_request_timeout[0], read=_request_timeout[1]) + connect=_request_timeout[0], + read=_request_timeout[1] + ) try: # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: - # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests - if (method != 'DELETE') and ('Content-Type' not in headers): - headers['Content-Type'] = 'application/json' - if query_params: - url += '?' 
+ urlencode(query_params) - if ('Content-Type' not in headers) or (re.search('json', headers['Content-Type'], re.IGNORECASE)): + + # no content type provided or payload is json + content_type = headers.get('Content-Type') + if ( + not content_type + or re.search('json', content_type, re.IGNORECASE) + ): request_body = None if body is not None: request_body = json.dumps(body) r = self.pool_manager.request( - method, url, + method, + url, body=request_body, - preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + headers=headers, + preload_content=False + ) + elif content_type == 'application/x-www-form-urlencoded': r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=False, - preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'multipart/form-data': + headers=headers, + preload_content=False + ) + elif content_type == 'multipart/form-data': # must del headers['Content-Type'], or the correct # Content-Type which generated by urllib3 will be # overwritten. del headers['Content-Type'] r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=True, - preload_content=_preload_content, timeout=timeout, - headers=headers) + headers=headers, + preload_content=False + ) # Pass a `string` parameter directly in the body to support - # other content types than Json when `body` argument is - # provided in serialized form + # other content types than JSON when `body` argument is + # provided in serialized form. 
elif isinstance(body, str) or isinstance(body, bytes): - request_body = body r = self.pool_manager.request( - method, url, + method, + url, + body=body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool): + request_body = "true" if body else "false" + r = self.pool_manager.request( + method, + url, body=request_body, - preload_content=_preload_content, + preload_content=False, timeout=timeout, headers=headers) else: @@ -196,151 +240,16 @@ def request(self, method, url, query_params=None, headers=None, raise ApiException(status=0, reason=msg) # For `GET`, `HEAD` else: - r = self.pool_manager.request(method, url, - fields=query_params, - preload_content=_preload_content, - timeout=timeout, - headers=headers) + r = self.pool_manager.request( + method, + url, + fields={}, + timeout=timeout, + headers=headers, + preload_content=False + ) except urllib3.exceptions.SSLError as e: - msg = "{0}\n{1}".format(type(e).__name__, str(e)) + msg = "\n".join([type(e).__name__, str(e)]) raise ApiException(status=0, reason=msg) - if _preload_content: - r = RESTResponse(r) - - # log response body - logger.debug("response body: %s", r.data) - - if not 200 <= r.status <= 299: - if r.status == 401: - raise UnauthorizedException(http_resp=r) - - if r.status == 403: - raise ForbiddenException(http_resp=r) - - if r.status == 404: - raise NotFoundException(http_resp=r) - - if 500 <= r.status <= 599: - raise ServiceException(http_resp=r) - - raise ApiException(http_resp=r) - - return r - - def GET(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("GET", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def HEAD(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("HEAD", url, - headers=headers, - 
_preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def OPTIONS(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("OPTIONS", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def DELETE(self, url, headers=None, query_params=None, body=None, - _preload_content=True, _request_timeout=None): - return self.request("DELETE", url, - headers=headers, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def POST(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("POST", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PUT(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PUT", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PATCH(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PATCH", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - -# end of class RESTClientObject -def is_ipv4(target): - """ Test if IPv4 address or not - """ - try: - chk = ipaddress.IPv4Address(target) - return True - except ipaddress.AddressValueError: - return False - -def in_ipv4net(target, net): - """ Test if target belongs to 
given IPv4 network - """ - try: - nw = ipaddress.IPv4Network(net) - ip = ipaddress.IPv4Address(target) - if ip in nw: - return True - return False - except ipaddress.AddressValueError: - return False - except ipaddress.NetmaskValueError: - return False - -def should_bypass_proxies(url, no_proxy=None): - """ Yet another requests.should_bypass_proxies - Test if proxies should not be used for a particular url. - """ - - parsed = urlparse(url) - - # special cases - if parsed.hostname in [None, '']: - return True - - # special cases - if no_proxy in [None , '']: - return False - if no_proxy == '*': - return True - - no_proxy = no_proxy.lower().replace(' ',''); - entries = ( - host for host in no_proxy.split(',') if host - ) - - if is_ipv4(parsed.hostname): - for item in entries: - if in_ipv4net(parsed.hostname, item): - return True - return proxy_bypass_environment(parsed.hostname, {'no': no_proxy} ) + return RESTResponse(r) diff --git a/sdks/python/client/docs/AWSElasticBlockStoreVolumeSource.md b/sdks/python/client/docs/AWSElasticBlockStoreVolumeSource.md index aae0b49e9a51..1848b1484a38 100644 --- a/sdks/python/client/docs/AWSElasticBlockStoreVolumeSource.md +++ b/sdks/python/client/docs/AWSElasticBlockStoreVolumeSource.md @@ -3,14 +3,31 @@ Represents a Persistent Disk resource in AWS. An AWS EBS disk must exist before mounting to a container. The disk must also be in the same AWS zone as the kubelet. An AWS EBS disk can only be mounted as read/write once. AWS EBS volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**volume_id** | **str** | Unique ID of the persistent disk resource in AWS (Amazon EBS volume). More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore | **fs_type** | **str** | Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. 
Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore | [optional] **partition** | **int** | The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty). | [optional] **read_only** | **bool** | Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**volume_id** | **str** | Unique ID of the persistent disk resource in AWS (Amazon EBS volume). 
More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore | + +## Example + +```python +from argo_workflows.models.aws_elastic_block_store_volume_source import AWSElasticBlockStoreVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of AWSElasticBlockStoreVolumeSource from a JSON string +aws_elastic_block_store_volume_source_instance = AWSElasticBlockStoreVolumeSource.from_json(json) +# print the JSON string representation of the object +print(AWSElasticBlockStoreVolumeSource.to_json()) +# convert the object into a dict +aws_elastic_block_store_volume_source_dict = aws_elastic_block_store_volume_source_instance.to_dict() +# create an instance of AWSElasticBlockStoreVolumeSource from a dict +aws_elastic_block_store_volume_source_form_dict = aws_elastic_block_store_volume_source.from_dict(aws_elastic_block_store_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Affinity.md b/sdks/python/client/docs/Affinity.md index 6cd9e03d4c82..a787265e544b 100644 --- a/sdks/python/client/docs/Affinity.md +++ b/sdks/python/client/docs/Affinity.md @@ -3,13 +3,30 @@ Affinity is a group of affinity scheduling rules. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **node_affinity** | [**NodeAffinity**](NodeAffinity.md) | | [optional] **pod_affinity** | [**PodAffinity**](PodAffinity.md) | | [optional] **pod_anti_affinity** | [**PodAntiAffinity**](PodAntiAffinity.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.affinity import Affinity + +# TODO update the JSON string below +json = "{}" +# create an instance of Affinity from a JSON string +affinity_instance = Affinity.from_json(json) +# print the JSON string representation of the object +print(Affinity.to_json()) + +# convert the object into a dict +affinity_dict = affinity_instance.to_dict() +# create an instance of Affinity from a dict +affinity_form_dict = affinity.from_dict(affinity_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ArchivedWorkflowServiceApi.md b/sdks/python/client/docs/ArchivedWorkflowServiceApi.md index 228c380586b8..ab50ebd23dbd 100644 --- a/sdks/python/client/docs/ArchivedWorkflowServiceApi.md +++ b/sdks/python/client/docs/ArchivedWorkflowServiceApi.md @@ -14,7 +14,7 @@ Method | HTTP request | Description # **delete_archived_workflow** -> bool, date, datetime, dict, float, int, list, str, none_type delete_archived_workflow(uid) +> object delete_archived_workflow(uid, namespace=namespace) @@ -23,11 +23,10 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import archived_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest 
import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -40,7 +39,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -48,37 +47,31 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) - uid = "uid_example" # str | - namespace = "namespace_example" # str | (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_archived_workflow(uid) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling ArchivedWorkflowServiceApi->delete_archived_workflow: %s\n" % e) + api_instance = argo_workflows.ArchivedWorkflowServiceApi(api_client) + uid = 'uid_example' # str | + namespace = 'namespace_example' # str | (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.delete_archived_workflow(uid, namespace=namespace) + print("The response of ArchivedWorkflowServiceApi->delete_archived_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArchivedWorkflowServiceApi->delete_archived_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | 
------------- | ------------- | ------------- - **uid** | **str**| | - **namespace** | **str**| | [optional] + **uid** | **str**| | + **namespace** | **str**| | [optional] ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -89,7 +82,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -100,7 +92,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_archived_workflow** -> IoArgoprojWorkflowV1alpha1Workflow get_archived_workflow(uid) +> IoArgoprojWorkflowV1alpha1Workflow get_archived_workflow(uid, namespace=namespace, name=name) @@ -109,12 +101,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import archived_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -127,7 +118,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -135,35 +126,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) - uid = "uid_example" # str | - namespace = "namespace_example" # str | (optional) - name = "name_example" # str | (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.get_archived_workflow(uid) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling ArchivedWorkflowServiceApi->get_archived_workflow: %s\n" % e) + api_instance = argo_workflows.ArchivedWorkflowServiceApi(api_client) + uid = 'uid_example' # str | + namespace = 'namespace_example' # str | (optional) + name = 'name_example' # str | (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.get_archived_workflow(uid, namespace=namespace, name=name) + print("The response of ArchivedWorkflowServiceApi->get_archived_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArchivedWorkflowServiceApi->get_archived_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **uid** | **str**| | - **namespace** | **str**| | [optional] - **name** | **str**| | [optional] + **uid** | **str**| | + **namespace** | **str**| | [optional] + **name** | **str**| | [optional] ### Return type @@ -178,7 +163,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ 
-189,7 +173,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_archived_workflow_label_keys** -> IoArgoprojWorkflowV1alpha1LabelKeys list_archived_workflow_label_keys() +> IoArgoprojWorkflowV1alpha1LabelKeys list_archived_workflow_label_keys(namespace=namespace) @@ -198,12 +182,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import archived_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -216,7 +199,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -224,24 +207,25 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | (optional) + api_instance = argo_workflows.ArchivedWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_archived_workflow_label_keys(namespace=namespace) + print("The response of ArchivedWorkflowServiceApi->list_archived_workflow_label_keys:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArchivedWorkflowServiceApi->list_archived_workflow_label_keys: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | [optional] + **namespace** | **str**| | [optional] ### Return type @@ -256,7 +240,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -267,7 +250,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_archived_workflow_label_values** -> IoArgoprojWorkflowV1alpha1LabelValues list_archived_workflow_label_values() +> IoArgoprojWorkflowV1alpha1LabelValues list_archived_workflow_label_values(list_options_label_selector=list_options_label_selector, 
list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, namespace=namespace) @@ -276,12 +259,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import archived_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -294,7 +276,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -302,42 +284,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.ArchivedWorkflowServiceApi(api_client) + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
(optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - namespace = "namespace_example" # str | (optional) - - # example passing only required values which don't have defaults set - # and optional values + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) + namespace = 'namespace_example' # str | (optional) + try: api_response = api_instance.list_archived_workflow_label_values(list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, namespace=namespace) + print("The response of ArchivedWorkflowServiceApi->list_archived_workflow_label_values:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArchivedWorkflowServiceApi->list_archived_workflow_label_values: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. 
If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] - **namespace** | **str**| | [optional] + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
| [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | [optional] ### Return type @@ -352,7 +335,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -363,7 +345,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_archived_workflows** -> IoArgoprojWorkflowV1alpha1WorkflowList list_archived_workflows() +> IoArgoprojWorkflowV1alpha1WorkflowList list_archived_workflows(list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, name_prefix=name_prefix, namespace=namespace) @@ -372,12 +354,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import archived_workflow_service_api -from 
argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -390,7 +371,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -398,44 +379,45 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.ArchivedWorkflowServiceApi(api_client) + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
(optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. 
If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. 
If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - name_prefix = "namePrefix_example" # str | (optional) - namespace = "namespace_example" # str | (optional) - - # example passing only required values which don't have defaults set - # and optional values + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. 
If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. 
If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) + name_prefix = 'name_prefix_example' # str | (optional) + namespace = 'namespace_example' # str | (optional) + try: api_response = api_instance.list_archived_workflows(list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, name_prefix=name_prefix, namespace=namespace) + print("The response of ArchivedWorkflowServiceApi->list_archived_workflows:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArchivedWorkflowServiceApi->list_archived_workflows: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
| [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] - **name_prefix** | **str**| | [optional] - **namespace** | **str**| | [optional] + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. 
| [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] + **name_prefix** | **str**| | [optional] + **namespace** | **str**| | [optional] ### Return type @@ -450,7 +432,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -470,13 +451,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import archived_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -489,7 +469,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -497,33 +477,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) - uid = "uid_example" # str | - body = IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest( - memoized=True, - name="name_example", - namespace="namespace_example", - parameters=[ - "parameters_example", - ], - uid="uid_example", - ) # IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.ArchivedWorkflowServiceApi(api_client) + uid = 'uid_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest() # IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest | + try: api_response = api_instance.resubmit_archived_workflow(uid, body) + print("The response of ArchivedWorkflowServiceApi->resubmit_archived_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArchivedWorkflowServiceApi->resubmit_archived_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **uid** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest**](IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md)| | + **uid** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest**](IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md)| | ### Return type @@ -538,7 +512,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | 
Description | Response headers | @@ -558,13 +531,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import archived_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -577,7 +549,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -585,34 +557,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = archived_workflow_service_api.ArchivedWorkflowServiceApi(api_client) - uid = "uid_example" # str | - body = IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest( - name="name_example", - namespace="namespace_example", - node_field_selector="node_field_selector_example", - parameters=[ - "parameters_example", - ], - restart_successful=True, - uid="uid_example", - ) # IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.ArchivedWorkflowServiceApi(api_client) + uid = 'uid_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest() # IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest | + try: api_response = api_instance.retry_archived_workflow(uid, body) + print("The response of ArchivedWorkflowServiceApi->retry_archived_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArchivedWorkflowServiceApi->retry_archived_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **uid** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest**](IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md)| | + **uid** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest**](IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md)| | ### Return type @@ -627,7 +592,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP 
response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/ArtifactServiceApi.md b/sdks/python/client/docs/ArtifactServiceApi.md index d27b7e680de1..3e21d7479c4f 100644 --- a/sdks/python/client/docs/ArtifactServiceApi.md +++ b/sdks/python/client/docs/ArtifactServiceApi.md @@ -12,7 +12,7 @@ Method | HTTP request | Description # **get_artifact_file** -> file_type get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, ) +> bytearray get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, artifact_discriminator) Get an artifact. @@ -21,11 +21,10 @@ Get an artifact. * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import artifact_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -38,7 +37,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -46,37 +45,40 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = artifact_service_api.ArtifactServiceApi(api_client) - namespace = "namespace_example" # str | - id_discriminator = "workflow" # str | - id = "id_example" # str | - node_id = "nodeId_example" # str | - artifact_name = "artifactName_example" # str | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.ArtifactServiceApi(api_client) + namespace = 'namespace_example' # str | + id_discriminator = 'id_discriminator_example' # str | + id = 'id_example' # str | + node_id = 'node_id_example' # str | + artifact_name = 'artifact_name_example' # str | + artifact_discriminator = 'artifact_discriminator_example' # str | + try: # Get an artifact. 
- api_response = api_instance.get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, ) + api_response = api_instance.get_artifact_file(namespace, id_discriminator, id, node_id, artifact_name, artifact_discriminator) + print("The response of ArtifactServiceApi->get_artifact_file:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArtifactServiceApi->get_artifact_file: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **id_discriminator** | **str**| | - **id** | **str**| | - **node_id** | **str**| | - **artifact_name** | **str**| | - **artifact_discriminator** | **str**| | defaults to "outputs" + **namespace** | **str**| | + **id_discriminator** | **str**| | + **id** | **str**| | + **node_id** | **str**| | + **artifact_name** | **str**| | + **artifact_discriminator** | **str**| | ### Return type -**file_type** +**bytearray** ### Authorization @@ -87,7 +89,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -98,7 +99,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_input_artifact** -> file_type get_input_artifact(namespace, name, node_id, artifact_name) +> bytearray get_input_artifact(namespace, name, node_id, artifact_name) Get an input artifact. @@ -107,11 +108,10 @@ Get an input artifact. 
* Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import artifact_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -124,7 +124,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -132,34 +132,36 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = artifact_service_api.ArtifactServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - node_id = "nodeId_example" # str | - artifact_name = "artifactName_example" # str | + api_instance = argo_workflows.ArtifactServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + node_id = 'node_id_example' # str | + artifact_name = 'artifact_name_example' # str | - # example passing only required values which don't have defaults set try: # Get an input artifact. 
api_response = api_instance.get_input_artifact(namespace, name, node_id, artifact_name) + print("The response of ArtifactServiceApi->get_input_artifact:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArtifactServiceApi->get_input_artifact: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **node_id** | **str**| | - **artifact_name** | **str**| | + **namespace** | **str**| | + **name** | **str**| | + **node_id** | **str**| | + **artifact_name** | **str**| | ### Return type -**file_type** +**bytearray** ### Authorization @@ -170,7 +172,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -181,7 +182,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_input_artifact_by_uid** -> file_type get_input_artifact_by_uid(uid, node_id, artifact_name) +> bytearray get_input_artifact_by_uid(uid, node_id, artifact_name) Get an input artifact by UID. @@ -190,11 +191,10 @@ Get an input artifact by UID. * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import artifact_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
configuration = argo_workflows.Configuration( @@ -207,7 +207,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -215,32 +215,34 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = artifact_service_api.ArtifactServiceApi(api_client) - uid = "uid_example" # str | - node_id = "nodeId_example" # str | - artifact_name = "artifactName_example" # str | + api_instance = argo_workflows.ArtifactServiceApi(api_client) + uid = 'uid_example' # str | + node_id = 'node_id_example' # str | + artifact_name = 'artifact_name_example' # str | - # example passing only required values which don't have defaults set try: # Get an input artifact by UID. 
api_response = api_instance.get_input_artifact_by_uid(uid, node_id, artifact_name) + print("The response of ArtifactServiceApi->get_input_artifact_by_uid:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArtifactServiceApi->get_input_artifact_by_uid: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **uid** | **str**| | - **node_id** | **str**| | - **artifact_name** | **str**| | + **uid** | **str**| | + **node_id** | **str**| | + **artifact_name** | **str**| | ### Return type -**file_type** +**bytearray** ### Authorization @@ -251,7 +253,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -262,7 +263,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_output_artifact** -> file_type get_output_artifact(namespace, name, node_id, artifact_name) +> bytearray get_output_artifact(namespace, name, node_id, artifact_name) Get an output artifact. @@ -271,11 +272,10 @@ Get an output artifact. * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import artifact_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -288,7 +288,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. 
# Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -296,34 +296,36 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = artifact_service_api.ArtifactServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - node_id = "nodeId_example" # str | - artifact_name = "artifactName_example" # str | + api_instance = argo_workflows.ArtifactServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + node_id = 'node_id_example' # str | + artifact_name = 'artifact_name_example' # str | - # example passing only required values which don't have defaults set try: # Get an output artifact. 
api_response = api_instance.get_output_artifact(namespace, name, node_id, artifact_name) + print("The response of ArtifactServiceApi->get_output_artifact:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArtifactServiceApi->get_output_artifact: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **node_id** | **str**| | - **artifact_name** | **str**| | + **namespace** | **str**| | + **name** | **str**| | + **node_id** | **str**| | + **artifact_name** | **str**| | ### Return type -**file_type** +**bytearray** ### Authorization @@ -334,7 +336,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -345,7 +346,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_output_artifact_by_uid** -> file_type get_output_artifact_by_uid(uid, node_id, artifact_name) +> bytearray get_output_artifact_by_uid(uid, node_id, artifact_name) Get an output artifact by UID. @@ -354,11 +355,10 @@ Get an output artifact by UID. * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import artifact_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
configuration = argo_workflows.Configuration( @@ -371,7 +371,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -379,32 +379,34 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = artifact_service_api.ArtifactServiceApi(api_client) - uid = "uid_example" # str | - node_id = "nodeId_example" # str | - artifact_name = "artifactName_example" # str | + api_instance = argo_workflows.ArtifactServiceApi(api_client) + uid = 'uid_example' # str | + node_id = 'node_id_example' # str | + artifact_name = 'artifact_name_example' # str | - # example passing only required values which don't have defaults set try: # Get an output artifact by UID. 
api_response = api_instance.get_output_artifact_by_uid(uid, node_id, artifact_name) + print("The response of ArtifactServiceApi->get_output_artifact_by_uid:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ArtifactServiceApi->get_output_artifact_by_uid: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **uid** | **str**| | - **node_id** | **str**| | - **artifact_name** | **str**| | + **uid** | **str**| | + **node_id** | **str**| | + **artifact_name** | **str**| | ### Return type -**file_type** +**bytearray** ### Authorization @@ -415,7 +417,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/AzureDiskVolumeSource.md b/sdks/python/client/docs/AzureDiskVolumeSource.md index 08b52b45e73f..bf79b3c7cc6a 100644 --- a/sdks/python/client/docs/AzureDiskVolumeSource.md +++ b/sdks/python/client/docs/AzureDiskVolumeSource.md @@ -3,16 +3,33 @@ AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**caching_mode** | **str** | Host Caching mode: None, Read Only, Read Write. | [optional] **disk_name** | **str** | The Name of the data disk in the blob storage | **disk_uri** | **str** | The URI the data disk in the blob storage | -**caching_mode** | **str** | Host Caching mode: None, Read Only, Read Write. | [optional] **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. 
| [optional] **kind** | **str** | Expected values Shared: multiple blob disks per storage account Dedicated: single blob disk per storage account Managed: azure managed data disk (only in managed availability set). defaults to shared | [optional] **read_only** | **bool** | Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.azure_disk_volume_source import AzureDiskVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of AzureDiskVolumeSource from a JSON string +azure_disk_volume_source_instance = AzureDiskVolumeSource.from_json(json) +# print the JSON string representation of the object +print(AzureDiskVolumeSource.to_json()) + +# convert the object into a dict +azure_disk_volume_source_dict = azure_disk_volume_source_instance.to_dict() +# create an instance of AzureDiskVolumeSource from a dict +azure_disk_volume_source_form_dict = azure_disk_volume_source.from_dict(azure_disk_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/AzureFileVolumeSource.md b/sdks/python/client/docs/AzureFileVolumeSource.md index 2c799d85ab81..ebb475cae4e9 100644 --- a/sdks/python/client/docs/AzureFileVolumeSource.md +++ b/sdks/python/client/docs/AzureFileVolumeSource.md @@ -3,13 +3,30 @@ AzureFile represents an Azure File Service mount on the host and bind mount to the pod. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**read_only** | **bool** | Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
| [optional] **secret_name** | **str** | the name of secret that contains Azure Storage Account Name and Key | **share_name** | **str** | Share Name | -**read_only** | **bool** | Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.azure_file_volume_source import AzureFileVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of AzureFileVolumeSource from a JSON string +azure_file_volume_source_instance = AzureFileVolumeSource.from_json(json) +# print the JSON string representation of the object +print(AzureFileVolumeSource.to_json()) + +# convert the object into a dict +azure_file_volume_source_dict = azure_file_volume_source_instance.to_dict() +# create an instance of AzureFileVolumeSource from a dict +azure_file_volume_source_form_dict = azure_file_volume_source.from_dict(azure_file_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/CSIVolumeSource.md b/sdks/python/client/docs/CSIVolumeSource.md index ee914513c775..7a13f91ce059 100644 --- a/sdks/python/client/docs/CSIVolumeSource.md +++ b/sdks/python/client/docs/CSIVolumeSource.md @@ -3,15 +3,32 @@ Represents a source location of a volume to mount, managed by an external CSI driver ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **driver** | **str** | Driver is the name of the CSI driver that handles this volume. Consult with your admin for the correct name as registered in the cluster. | **fs_type** | **str** | Filesystem type to mount. Ex. \"ext4\", \"xfs\", \"ntfs\". 
If not provided, the empty value is passed to the associated CSI driver which will determine the default filesystem to apply. | [optional] **node_publish_secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | [optional] **read_only** | **bool** | Specifies a read-only configuration for the volume. Defaults to false (read/write). | [optional] -**volume_attributes** | **{str: (str,)}** | VolumeAttributes stores driver-specific properties that are passed to the CSI driver. Consult your driver's documentation for supported values. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**volume_attributes** | **Dict[str, str]** | VolumeAttributes stores driver-specific properties that are passed to the CSI driver. Consult your driver's documentation for supported values. | [optional] + +## Example + +```python +from argo_workflows.models.csi_volume_source import CSIVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of CSIVolumeSource from a JSON string +csi_volume_source_instance = CSIVolumeSource.from_json(json) +# print the JSON string representation of the object +print(CSIVolumeSource.to_json()) +# convert the object into a dict +csi_volume_source_dict = csi_volume_source_instance.to_dict() +# create an instance of CSIVolumeSource from a dict +csi_volume_source_form_dict = csi_volume_source.from_dict(csi_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Capabilities.md b/sdks/python/client/docs/Capabilities.md index acacbcd7c946..c17124dea2dd 100644 --- a/sdks/python/client/docs/Capabilities.md +++ b/sdks/python/client/docs/Capabilities.md @@ -3,12 +3,29 @@ Adds and removes POSIX capabilities from running containers. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**add** | **[str]** | Added capabilities | [optional] -**drop** | **[str]** | Removed capabilities | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**add** | **List[str]** | Added capabilities | [optional] +**drop** | **List[str]** | Removed capabilities | [optional] + +## Example + +```python +from argo_workflows.models.capabilities import Capabilities + +# TODO update the JSON string below +json = "{}" +# create an instance of Capabilities from a JSON string +capabilities_instance = Capabilities.from_json(json) +# print the JSON string representation of the object +print(Capabilities.to_json()) +# convert the object into a dict +capabilities_dict = capabilities_instance.to_dict() +# create an instance of Capabilities from a dict +capabilities_form_dict = capabilities.from_dict(capabilities_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/CephFSVolumeSource.md b/sdks/python/client/docs/CephFSVolumeSource.md index 30fc005fd126..6a24713e9d2b 100644 --- a/sdks/python/client/docs/CephFSVolumeSource.md +++ b/sdks/python/client/docs/CephFSVolumeSource.md @@ -3,16 +3,33 @@ Represents a Ceph Filesystem mount that lasts the lifetime of a pod Cephfs volumes do not support ownership management or SELinux relabeling. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**monitors** | **[str]** | Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it | +**monitors** | **List[str]** | Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it | **path** | **str** | Optional: Used as the mounted root, rather than the full Ceph tree, default is / | [optional] **read_only** | **bool** | Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it | [optional] **secret_file** | **str** | Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it | [optional] **secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | [optional] **user** | **str** | Optional: User is the rados user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.ceph_fs_volume_source import CephFSVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of CephFSVolumeSource from a JSON string +ceph_fs_volume_source_instance = CephFSVolumeSource.from_json(json) +# print the JSON string representation of the object +print(CephFSVolumeSource.to_json()) + +# convert the object into a dict +ceph_fs_volume_source_dict = ceph_fs_volume_source_instance.to_dict() +# create an instance of CephFSVolumeSource from a dict +ceph_fs_volume_source_form_dict = 
ceph_fs_volume_source.from_dict(ceph_fs_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/CinderVolumeSource.md b/sdks/python/client/docs/CinderVolumeSource.md index 599e76f957b8..7ec72ac0d3dc 100644 --- a/sdks/python/client/docs/CinderVolumeSource.md +++ b/sdks/python/client/docs/CinderVolumeSource.md @@ -3,14 +3,31 @@ Represents a cinder volume resource in Openstack. A Cinder volume must exist before mounting to a container. The volume must also be in the same region as the kubelet. Cinder volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**volume_id** | **str** | volume id used to identify the volume in cinder. More info: https://examples.k8s.io/mysql-cinder-pd/README.md | **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md | [optional] **read_only** | **bool** | Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md | [optional] **secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**volume_id** | **str** | volume id used to identify the volume in cinder. 
More info: https://examples.k8s.io/mysql-cinder-pd/README.md | + +## Example + +```python +from argo_workflows.models.cinder_volume_source import CinderVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of CinderVolumeSource from a JSON string +cinder_volume_source_instance = CinderVolumeSource.from_json(json) +# print the JSON string representation of the object +print(CinderVolumeSource.to_json()) +# convert the object into a dict +cinder_volume_source_dict = cinder_volume_source_instance.to_dict() +# create an instance of CinderVolumeSource from a dict +cinder_volume_source_form_dict = cinder_volume_source.from_dict(cinder_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md b/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md index 5f2d50121d95..705152f613b2 100644 --- a/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md +++ b/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md @@ -22,13 +22,12 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cluster_workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -41,7 +40,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -49,10694 +48,25 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) - body = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest( - create_options=CreateOptions( - dry_run=[ - "dry_run_example", - ], - field_manager="field_manager_example", - field_validation="field_validation_example", - ), - template=IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - 
fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( 
- match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", 
- optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - 
"scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), 
- ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - 
annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - 
active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - 
namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - 
security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - 
container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - 
), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - 
gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - 
archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - 
"addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - 
fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": 
"key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - 
mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - 
type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - 
"key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - 
cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - 
zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - 
scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - 
service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), 
- liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - 
localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - 
labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - 
failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - 
url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - 
caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - 
deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - 
secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - 
path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - 
), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - 
match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - 
ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - 
value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", 
- }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - 
container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - 
), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - 
gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - 
archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - 
"addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - 
fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": 
"key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - 
mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - 
type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - 
"key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - 
cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - 
zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - 
scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - 
service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), 
- liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - 
localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - 
labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - 
failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - 
url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - 
caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - 
deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - 
secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - 
path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - 
), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - 
}, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": "key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - 
key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - 
spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - 
iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", 
- read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - ) # IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest | + api_instance = argo_workflows.ClusterWorkflowTemplateServiceApi(api_client) + body = argo_workflows.IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest() # IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.create_cluster_workflow_template(body) 
+ print("The response of ClusterWorkflowTemplateServiceApi->create_cluster_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ClusterWorkflowTemplateServiceApi->create_cluster_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.md)| | + **body** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.md)| | ### Return type @@ -10751,7 +81,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10762,7 +91,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **delete_cluster_workflow_template** -> bool, date, datetime, dict, float, int, list, str, none_type delete_cluster_workflow_template(name) +> object delete_cluster_workflow_template(name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) @@ -10771,11 +100,10 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cluster_workflow_template_service_api -from 
argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -10788,7 +116,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10796,49 +124,41 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) - name = "name_example" # str | - delete_options_grace_period_seconds = "deleteOptions.gracePeriodSeconds_example" # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) - delete_options_preconditions_uid = "deleteOptions.preconditions.uid_example" # str | Specifies the target UID. +optional. (optional) - delete_options_preconditions_resource_version = "deleteOptions.preconditions.resourceVersion_example" # str | Specifies the target ResourceVersion +optional. 
(optional) + api_instance = argo_workflows.ClusterWorkflowTemplateServiceApi(api_client) + name = 'name_example' # str | + delete_options_grace_period_seconds = 'delete_options_grace_period_seconds_example' # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) + delete_options_preconditions_uid = 'delete_options_preconditions_uid_example' # str | Specifies the target UID. +optional. (optional) + delete_options_preconditions_resource_version = 'delete_options_preconditions_resource_version_example' # str | Specifies the target ResourceVersion +optional. (optional) delete_options_orphan_dependents = True # bool | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. (optional) - delete_options_propagation_policy = "deleteOptions.propagationPolicy_example" # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) - delete_options_dry_run = [ - "deleteOptions.dryRun_example", - ] # [str] | When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_cluster_workflow_template(name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling ClusterWorkflowTemplateServiceApi->delete_cluster_workflow_template: %s\n" % e) + delete_options_propagation_policy = 'delete_options_propagation_policy_example' # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) + delete_options_dry_run = ['delete_options_dry_run_example'] # List[str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
(optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.delete_cluster_workflow_template(name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + print("The response of ClusterWorkflowTemplateServiceApi->delete_cluster_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ClusterWorkflowTemplateServiceApi->delete_cluster_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **name** | **str**| | - **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] - **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] - **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. 
| [optional] - **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] + **name** | **str**| | + **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] + **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] + **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] + **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] + **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. 
Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] + **delete_options_dry_run** | [**List[str]**](str.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -10849,7 +169,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10860,7 +179,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_cluster_workflow_template** -> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate get_cluster_workflow_template(name) +> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate get_cluster_workflow_template(name, get_options_resource_version=get_options_resource_version) @@ -10869,12 +188,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cluster_workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import 
IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -10887,7 +205,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10895,33 +213,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) - name = "name_example" # str | - get_options_resource_version = "getOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.get_cluster_workflow_template(name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling ClusterWorkflowTemplateServiceApi->get_cluster_workflow_template: %s\n" % e) + api_instance = argo_workflows.ClusterWorkflowTemplateServiceApi(api_client) + name = 'name_example' # str | + get_options_resource_version = 'get_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.get_cluster_workflow_template(name, get_options_resource_version=get_options_resource_version) + print("The response of ClusterWorkflowTemplateServiceApi->get_cluster_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ClusterWorkflowTemplateServiceApi->get_cluster_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **name** | **str**| | - **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **name** | **str**| | + **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional | [optional] ### Return type @@ -10936,7 +248,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10956,13 +267,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cluster_workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -10975,7 +285,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10983,10694 +293,25 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) - body = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest( - create_options=CreateOptions( - dry_run=[ - "dry_run_example", - ], - field_manager="field_manager_example", - field_validation="field_validation_example", - ), - template=IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - 
preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - 
value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - 
scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - 
match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - 
], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, 
- ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( 
- host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - 
port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - 
zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - 
json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - 
name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - 
histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - 
), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - 
sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - 
), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - 
divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - 
host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - 
template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - 
fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - 
block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - 
type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - 
user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - 
data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": 
"key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": 
"key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - 
portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - 
key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - ) # IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest | + api_instance = argo_workflows.ClusterWorkflowTemplateServiceApi(api_client) + body = argo_workflows.IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest() # IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.lint_cluster_workflow_template(body) + print("The response of ClusterWorkflowTemplateServiceApi->lint_cluster_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ClusterWorkflowTemplateServiceApi->lint_cluster_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.md)| | + **body** | 
[**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.md)| | ### Return type @@ -21685,7 +326,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -21696,7 +336,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_cluster_workflow_templates** -> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList list_cluster_workflow_templates() +> IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList list_cluster_workflow_templates(list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -21705,12 +345,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cluster_workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host 
is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -21723,7 +362,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -21731,40 +370,41 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.ClusterWorkflowTemplateServiceApi(api_client) + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. 
(optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_cluster_workflow_templates(list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of ClusterWorkflowTemplateServiceApi->list_cluster_workflow_templates:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ClusterWorkflowTemplateServiceApi->list_cluster_workflow_templates: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
| [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. 
It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] ### Return type @@ -21779,7 +419,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -21799,13 +438,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cluster_workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -21818,7 +456,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -21826,10690 +464,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cluster_workflow_template_service_api.ClusterWorkflowTemplateServiceApi(api_client) - name = "name_example" # str | DEPRECATED: This field is ignored. - body = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest( - name="name_example", - template=IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - 
preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": 
"key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - 
histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - 
run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - 
operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - 
"values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", 
- revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - 
value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( 
- port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - 
secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - 
value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - 
tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - 
endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - 
client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - 
env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - 
grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), 
- stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - 
outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, 
- oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - 
], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - 
prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - 
with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - 
fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - 
kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - 
gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - 
pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - 
namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - 
ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), 
- headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - 
success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - 
service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - 
name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - 
VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - 
client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - 
env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - 
grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), 
- stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - 
outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, 
- oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - 
], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - 
prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - 
with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - 
fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - 
kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - 
gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - 
fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": "key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - 
aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - 
cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - 
flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - 
DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - 
fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - ) # IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest | + api_instance = argo_workflows.ClusterWorkflowTemplateServiceApi(api_client) + name = 'name_example' # str | DEPRECATED: This field is ignored. + body = argo_workflows.IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest() # IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.update_cluster_workflow_template(name, body) + print("The response of ClusterWorkflowTemplateServiceApi->update_cluster_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling ClusterWorkflowTemplateServiceApi->update_cluster_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **name** | **str**| DEPRECATED: This field is ignored. | - **body** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.md)| | + **name** | **str**| DEPRECATED: This field is ignored. 
| + **body** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.md)| | ### Return type @@ -32524,7 +499,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/ConfigMapEnvSource.md b/sdks/python/client/docs/ConfigMapEnvSource.md index 115e1b4e025d..c620d5fd75f1 100644 --- a/sdks/python/client/docs/ConfigMapEnvSource.md +++ b/sdks/python/client/docs/ConfigMapEnvSource.md @@ -3,12 +3,29 @@ ConfigMapEnvSource selects a ConfigMap to populate the environment variables with. The contents of the target ConfigMap's Data field will represent the key-value pairs as environment variables. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] **optional** | **bool** | Specify whether the ConfigMap must be defined | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.config_map_env_source import ConfigMapEnvSource + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigMapEnvSource from a JSON string +config_map_env_source_instance = ConfigMapEnvSource.from_json(json) +# print the JSON string representation of the object +print(config_map_env_source_instance.to_json()) + +# convert the object into a dict +config_map_env_source_dict = config_map_env_source_instance.to_dict() +# create an instance of ConfigMapEnvSource from a dict +config_map_env_source_from_dict = ConfigMapEnvSource.from_dict(config_map_env_source_dict) +``` [[Back to 
Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ConfigMapKeySelector.md b/sdks/python/client/docs/ConfigMapKeySelector.md index 038f51a6294a..c6cea4c91b8a 100644 --- a/sdks/python/client/docs/ConfigMapKeySelector.md +++ b/sdks/python/client/docs/ConfigMapKeySelector.md @@ -3,13 +3,30 @@ Selects a key from a ConfigMap. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | The key to select. | **name** | **str** | Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] **optional** | **bool** | Specify whether the ConfigMap or its key must be defined | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.config_map_key_selector import ConfigMapKeySelector + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigMapKeySelector from a JSON string +config_map_key_selector_instance = ConfigMapKeySelector.from_json(json) +# print the JSON string representation of the object +print(config_map_key_selector_instance.to_json()) + +# convert the object into a dict +config_map_key_selector_dict = config_map_key_selector_instance.to_dict() +# create an instance of ConfigMapKeySelector from a dict +config_map_key_selector_from_dict = ConfigMapKeySelector.from_dict(config_map_key_selector_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ConfigMapProjection.md b/sdks/python/client/docs/ConfigMapProjection.md index 458d5ac2c188..2a131fc8b86f 100644 
--- a/sdks/python/client/docs/ConfigMapProjection.md +++ b/sdks/python/client/docs/ConfigMapProjection.md @@ -3,13 +3,30 @@ Adapts a ConfigMap into a projected volume. The contents of the target ConfigMap's Data field will be presented in a projected volume as files using the keys in the Data field as the file names, unless the items element is populated with specific mappings of keys to paths. Note that this is identical to a configmap volume source without the default mode. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] +**items** | [**List[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] **name** | **str** | Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] **optional** | **bool** | Specify whether the ConfigMap or its keys must be defined | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.config_map_projection import ConfigMapProjection + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigMapProjection from a JSON string +config_map_projection_instance = ConfigMapProjection.from_json(json) +# print the JSON string representation of the object +print(config_map_projection_instance.to_json()) + +# convert the object into a dict +config_map_projection_dict = config_map_projection_instance.to_dict() +# create an instance of ConfigMapProjection from a dict +config_map_projection_from_dict = ConfigMapProjection.from_dict(config_map_projection_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ConfigMapVolumeSource.md b/sdks/python/client/docs/ConfigMapVolumeSource.md index d4fa4742a171..921823c8765d 100644 --- a/sdks/python/client/docs/ConfigMapVolumeSource.md +++ b/sdks/python/client/docs/ConfigMapVolumeSource.md @@ -3,14 +3,31 @@ Adapts a ConfigMap into a volume. The contents of the target ConfigMap's Data field will be presented in a volume as files using the keys in the Data field as the file names, unless the items element is populated with specific mappings of keys to paths. ConfigMap volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **default_mode** | **int** | Optional: mode bits used to set permissions on created files by default. 
Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. | [optional] -**items** | [**[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] +**items** | [**List[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] **name** | **str** | Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] **optional** | **bool** | Specify whether the ConfigMap or its keys must be defined | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.config_map_volume_source import ConfigMapVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigMapVolumeSource from a JSON string +config_map_volume_source_instance = ConfigMapVolumeSource.from_json(json) +# print the JSON string representation of the object +print(config_map_volume_source_instance.to_json()) + +# convert the object into a dict +config_map_volume_source_dict = config_map_volume_source_instance.to_dict() +# create an instance of ConfigMapVolumeSource from a dict +config_map_volume_source_from_dict = ConfigMapVolumeSource.from_dict(config_map_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Container.md b/sdks/python/client/docs/Container.md index 6c70e01262fc..dd4814c04d00 100644 --- a/sdks/python/client/docs/Container.md +++ b/sdks/python/client/docs/Container.md @@ -3,18 +3,19 @@ A single application container that you want to run within a pod. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**args** | **List[str]** | Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. 
\"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **List[str]** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**env** | [**List[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] +**env_from** | [**List[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] **image** | **str** | Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | -**args** | **[str]** | Arguments to the entrypoint. 
The docker image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **[str]** | Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**env** | [**[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] -**env_from** | [**[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] **image_pull_policy** | **str** | Image pull policy. 
One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images Possible enum values: - `\"Always\"` means that kubelet always attempts to pull the latest image. Container will fail If the pull fails. - `\"IfNotPresent\"` means that kubelet pulls if the image isn't present on disk. Container will fail if the image isn't present and the pull fails. - `\"Never\"` means that kubelet never pulls an image, but only uses a local image. Container will fail if the image isn't present | [optional] **lifecycle** | [**Lifecycle**](Lifecycle.md) | | [optional] **liveness_probe** | [**Probe**](Probe.md) | | [optional] **name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | [optional] -**ports** | [**[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Exposing a port here gives the system additional information about the network connections a container uses, but is primarily informational. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Cannot be updated. | [optional] +**ports** | [**List[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Exposing a port here gives the system additional information about the network connections a container uses, but is primarily informational. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Cannot be updated. 
| [optional] **readiness_probe** | [**Probe**](Probe.md) | | [optional] **resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] **security_context** | [**SecurityContext**](SecurityContext.md) | | [optional] @@ -24,11 +25,27 @@ Name | Type | Description | Notes **termination_message_path** | **str** | Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated. | [optional] **termination_message_policy** | **str** | Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated. Possible enum values: - `\"FallbackToLogsOnError\"` will read the most recent contents of the container logs for the container status message when the container exits with an error and the terminationMessagePath has no contents. - `\"File\"` is the default behavior and will set the container status message to the contents of the container's terminationMessagePath when the container exits. | [optional] **tty** | **bool** | Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false. | [optional] -**volume_devices** | [**[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. 
| [optional] -**volume_mounts** | [**[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] +**volume_devices** | [**List[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. | [optional] +**volume_mounts** | [**List[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] **working_dir** | **str** | Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.container import Container + +# TODO update the JSON string below +json = "{}" +# create an instance of Container from a JSON string +container_instance = Container.from_json(json) +# print the JSON string representation of the object +print(Container.to_json()) + +# convert the object into a dict +container_dict = container_instance.to_dict() +# create an instance of Container from a dict +container_form_dict = container.from_dict(container_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ContainerPort.md b/sdks/python/client/docs/ContainerPort.md index 36dfc1b46bf9..778713336a07 100644 --- a/sdks/python/client/docs/ContainerPort.md +++ b/sdks/python/client/docs/ContainerPort.md @@ -3,6 +3,7 @@ ContainerPort represents a network port in a single container. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **container_port** | **int** | Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536. | @@ -10,8 +11,24 @@ Name | Type | Description | Notes **host_port** | **int** | Number of port to expose on the host. If specified, this must be a valid port number, 0 < x < 65536. If HostNetwork is specified, this must match ContainerPort. Most containers do not need this. | [optional] **name** | **str** | If specified, this must be an IANA_SVC_NAME and unique within the pod. Each named port in a pod must have a unique name. Name for the port that can be referred to by services. | [optional] **protocol** | **str** | Protocol for port. Must be UDP, TCP, or SCTP. Defaults to \"TCP\". Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. - `\"UDP\"` is the UDP protocol. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.container_port import ContainerPort + +# TODO update the JSON string below +json = "{}" +# create an instance of ContainerPort from a JSON string +container_port_instance = ContainerPort.from_json(json) +# print the JSON string representation of the object +print(ContainerPort.to_json()) + +# convert the object into a dict +container_port_dict = container_port_instance.to_dict() +# create an instance of ContainerPort from a dict +container_port_form_dict = container_port.from_dict(container_port_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/CreateOptions.md b/sdks/python/client/docs/CreateOptions.md index 8536027156a1..0fa1c522190e 
100644 --- a/sdks/python/client/docs/CreateOptions.md +++ b/sdks/python/client/docs/CreateOptions.md @@ -3,13 +3,30 @@ CreateOptions may be provided when creating an API object. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**dry_run** | **[str]** | | [optional] +**dry_run** | **List[str]** | | [optional] **field_manager** | **str** | | [optional] **field_validation** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.create_options import CreateOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateOptions from a JSON string +create_options_instance = CreateOptions.from_json(json) +# print the JSON string representation of the object +print(CreateOptions.to_json()) + +# convert the object into a dict +create_options_dict = create_options_instance.to_dict() +# create an instance of CreateOptions from a dict +create_options_form_dict = create_options.from_dict(create_options_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/CronWorkflowServiceApi.md b/sdks/python/client/docs/CronWorkflowServiceApi.md index 455b2749c5cd..7ad78dad80f7 100644 --- a/sdks/python/client/docs/CronWorkflowServiceApi.md +++ b/sdks/python/client/docs/CronWorkflowServiceApi.md @@ -24,13 +24,12 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_create_cron_workflow_request 
import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_cron_workflow_request import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -43,7 +42,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -51,10779 +50,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - body = IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest( - create_options=CreateOptions( - dry_run=[ - "dry_run_example", - ], - field_manager="field_manager_example", - field_validation="field_validation_example", - ), - cron_workflow=IoArgoprojWorkflowV1alpha1CronWorkflow( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1CronWorkflowSpec( - concurrency_policy="concurrency_policy_example", - failed_jobs_history_limit=1, - 
schedule="schedule_example", - schedules=[ - "schedules_example", - ], - starting_deadline_seconds=1, - stop_strategy=IoArgoprojWorkflowV1alpha1StopStrategy( - condition="condition_example", - ), - successful_jobs_history_limit=1, - suspend=True, - timezone="timezone_example", - workflow_metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - workflow_spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - 
node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", 
- values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - 
automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - 
pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - 
addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - 
host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - 
host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - 
timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - 
sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - 
service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - 
server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( 
- auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - 
s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - 
template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - 
insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - 
IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - 
image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - 
port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - 
value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, 
- ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - 
jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - 
branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - 
key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - 
HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - 
resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - 
sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - 
post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - 
success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - 
mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - 
value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - 
suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - 
downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - 
resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - 
persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - 
label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - 
url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - 
timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - 
value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - 
lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - 
success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - 
mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - 
template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - 
name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - 
http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - 
termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - 
branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - 
key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - 
], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - 
prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - 
with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - 
fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - 
kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - 
gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - 
fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": "key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - 
aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - 
cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - 
flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - 
DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - 
fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - status=IoArgoprojWorkflowV1alpha1CronWorkflowStatus( - active=[ - ObjectReference( - api_version="api_version_example", - field_path="field_path_example", - kind="kind_example", - name="name_example", - namespace="namespace_example", - resource_version="resource_version_example", - uid="uid_example", - ), - ], - conditions=[ - IoArgoprojWorkflowV1alpha1Condition( - message="message_example", - status="status_example", - type="type_example", - ), - ], - failed=1, - last_scheduled_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - phase="phase_example", - succeeded=1, - ), - ), - namespace="namespace_example", - ) # IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest | + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest() # IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.create_cron_workflow(namespace, body) + print("The response of CronWorkflowServiceApi->create_cron_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->create_cron_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | 
------------- - **namespace** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest**](IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.md)| | + **namespace** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest**](IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.md)| | ### Return type @@ -10838,7 +85,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10849,7 +95,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **delete_cron_workflow** -> bool, date, datetime, dict, float, int, list, str, none_type delete_cron_workflow(namespace, name) +> object delete_cron_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) @@ -10858,11 +104,10 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
configuration = argo_workflows.Configuration( @@ -10875,7 +120,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10883,51 +128,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - delete_options_grace_period_seconds = "deleteOptions.gracePeriodSeconds_example" # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) - delete_options_preconditions_uid = "deleteOptions.preconditions.uid_example" # str | Specifies the target UID. +optional. (optional) - delete_options_preconditions_resource_version = "deleteOptions.preconditions.resourceVersion_example" # str | Specifies the target ResourceVersion +optional. (optional) + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + delete_options_grace_period_seconds = 'delete_options_grace_period_seconds_example' # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. 
If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) + delete_options_preconditions_uid = 'delete_options_preconditions_uid_example' # str | Specifies the target UID. +optional. (optional) + delete_options_preconditions_resource_version = 'delete_options_preconditions_resource_version_example' # str | Specifies the target ResourceVersion +optional. (optional) delete_options_orphan_dependents = True # bool | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. (optional) - delete_options_propagation_policy = "deleteOptions.propagationPolicy_example" # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) - delete_options_dry_run = [ - "deleteOptions.dryRun_example", - ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. (optional) + delete_options_propagation_policy = 'delete_options_propagation_policy_example' # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) + delete_options_dry_run = ['delete_options_dry_run_example'] # List[str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_cron_workflow(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling CronWorkflowServiceApi->delete_cron_workflow: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.delete_cron_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + print("The response of CronWorkflowServiceApi->delete_cron_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->delete_cron_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | 
**str**| | - **name** | **str**| | - **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] - **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] - **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] - **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
| [optional] + **namespace** | **str**| | + **name** | **str**| | + **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] + **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] + **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] + **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] + **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] + **delete_options_dry_run** | [**List[str]**](str.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
| [optional] ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -10938,7 +175,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10949,7 +185,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_cron_workflow** -> IoArgoprojWorkflowV1alpha1CronWorkflow get_cron_workflow(namespace, name) +> IoArgoprojWorkflowV1alpha1CronWorkflow get_cron_workflow(namespace, name, get_options_resource_version=get_options_resource_version) @@ -10958,12 +194,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -10976,7 +211,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10984,35 +219,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - get_options_resource_version = "getOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.get_cron_workflow(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling CronWorkflowServiceApi->get_cron_workflow: %s\n" % e) + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + get_options_resource_version = 'get_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.get_cron_workflow(namespace, name, get_options_resource_version=get_options_resource_version) + print("The response of CronWorkflowServiceApi->get_cron_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->get_cron_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **namespace** | **str**| | + **name** | **str**| | + **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional | [optional] ### Return type @@ -11027,7 +256,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -11047,13 +275,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -11066,7 +293,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -11074,10772 +301,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - body = IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest( - cron_workflow=IoArgoprojWorkflowV1alpha1CronWorkflow( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1CronWorkflowSpec( - concurrency_policy="concurrency_policy_example", - failed_jobs_history_limit=1, - schedule="schedule_example", - schedules=[ - "schedules_example", - ], - starting_deadline_seconds=1, - stop_strategy=IoArgoprojWorkflowV1alpha1StopStrategy( - 
condition="condition_example", - ), - successful_jobs_history_limit=1, - suspend=True, - timezone="timezone_example", - workflow_metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - workflow_spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - 
), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - 
container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - 
PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - 
value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - 
match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - 
host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( 
- match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - 
mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - 
env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - 
service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - 
stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - 
allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - 
duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ 
- "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - 
count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - 
IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - 
executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - 
HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - 
resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - 
sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - 
value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - 
name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - 
priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - 
revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - 
token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - 
merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - 
), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - 
se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - 
security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - 
IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - 
value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - 
name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], 
- ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - 
read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - 
optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - 
key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - 
mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - 
config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - 
http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - 
termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - 
host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - 
capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - 
retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - 
insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - 
IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, 
- ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - 
end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - 
name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - 
service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - 
), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": 
"key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - 
working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), 
- mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", 
- ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - 
priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - 
revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - 
token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - 
merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - 
), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - 
se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - 
security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - 
IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - 
value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - 
name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], 
- ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - 
read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - 
optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - 
resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": "key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - 
), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", 
- operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - 
directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - 
service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( 
- cluster_scope=True, - name="name_example", - ), - ), - ), - status=IoArgoprojWorkflowV1alpha1CronWorkflowStatus( - active=[ - ObjectReference( - api_version="api_version_example", - field_path="field_path_example", - kind="kind_example", - name="name_example", - namespace="namespace_example", - resource_version="resource_version_example", - uid="uid_example", - ), - ], - conditions=[ - IoArgoprojWorkflowV1alpha1Condition( - message="message_example", - status="status_example", - type="type_example", - ), - ], - failed=1, - last_scheduled_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - phase="phase_example", - succeeded=1, - ), - ), - namespace="namespace_example", - ) # IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest | + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest() # IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.lint_cron_workflow(namespace, body) + print("The response of CronWorkflowServiceApi->lint_cron_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->lint_cron_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest**](IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.md)| | + **namespace** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest**](IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.md)| | ### Return type @@ -21854,7 +336,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response 
headers | @@ -21865,7 +346,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_cron_workflows** -> IoArgoprojWorkflowV1alpha1CronWorkflowList list_cron_workflows(namespace) +> IoArgoprojWorkflowV1alpha1CronWorkflowList list_cron_workflows(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -21874,12 +355,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_list import IoArgoprojWorkflowV1alpha1CronWorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_list import IoArgoprojWorkflowV1alpha1CronWorkflowList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -21892,7 +372,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. 
# Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -21900,49 +380,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.list_cron_workflows(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling CronWorkflowServiceApi->list_cron_workflows: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_cron_workflows(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of CronWorkflowServiceApi->list_cron_workflows:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->list_cron_workflows: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. 
| [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. 
Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. 
| [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] ### Return type @@ -21957,7 +431,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -21977,13 +450,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest +from argo_workflows.rest import ApiException from 
pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -21996,7 +468,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -22004,30 +476,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest( - name="name_example", - namespace="namespace_example", - ) # IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest | + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest() # IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.resume_cron_workflow(namespace, name, body) + print("The response of CronWorkflowServiceApi->resume_cron_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->resume_cron_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** 
| **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest**](IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest**](IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.md)| | ### Return type @@ -22042,7 +513,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -22062,13 +532,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -22081,7 +550,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -22089,30 +558,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest( - name="name_example", - namespace="namespace_example", - ) # IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest | + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest() # IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.suspend_cron_workflow(namespace, name, body) + print("The response of CronWorkflowServiceApi->suspend_cron_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->suspend_cron_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest**](IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest**](IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.md)| | ### Return type @@ -22127,7 +595,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code 
| Description | Response headers | @@ -22147,13 +614,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import cron_workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -22166,7 +632,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -22174,10775 +640,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = cron_workflow_service_api.CronWorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | DEPRECATED: This field is ignored. 
- body = IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest( - cron_workflow=IoArgoprojWorkflowV1alpha1CronWorkflow( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1CronWorkflowSpec( - concurrency_policy="concurrency_policy_example", - failed_jobs_history_limit=1, - schedule="schedule_example", - schedules=[ - "schedules_example", - ], - starting_deadline_seconds=1, - stop_strategy=IoArgoprojWorkflowV1alpha1StopStrategy( - condition="condition_example", - ), - successful_jobs_history_limit=1, - suspend=True, - timezone="timezone_example", - workflow_metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - 
generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - workflow_spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - 
branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - 
key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - 
archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - 
"addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", 
- ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - 
host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - 
timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - 
sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - 
localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - 
IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), 
- mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - 
archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - 
key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", 
- value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - 
], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - 
se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - 
archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - 
"addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - 
max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - 
liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - 
localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - 
value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - 
se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - 
archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - 
key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - 
single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - 
default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - 
aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - 
cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - 
flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - 
DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - 
fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - 
label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - 
archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - 
krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - 
), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - 
post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - 
mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - 
port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - 
role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - 
dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - 
name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - 
global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - 
key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - 
server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - 
_none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - 
hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - 
body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - 
privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( 
- compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - 
create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - 
jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - 
artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - 
duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - 
host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - 
seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - 
Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - 
value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - 
run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), 
- zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - 
json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - 
revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - 
token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - 
IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - 
volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": 
"key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - 
), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - 
DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - 
fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - 
data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": "key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - 
read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - 
controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - 
iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", 
- ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - status=IoArgoprojWorkflowV1alpha1CronWorkflowStatus( - active=[ - ObjectReference( - api_version="api_version_example", - field_path="field_path_example", - kind="kind_example", - name="name_example", - namespace="namespace_example", - 
resource_version="resource_version_example", - uid="uid_example", - ), - ], - conditions=[ - IoArgoprojWorkflowV1alpha1Condition( - message="message_example", - status="status_example", - type="type_example", - ), - ], - failed=1, - last_scheduled_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - phase="phase_example", - succeeded=1, - ), - ), - name="name_example", - namespace="namespace_example", - ) # IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest | + api_instance = argo_workflows.CronWorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | DEPRECATED: This field is ignored. + body = argo_workflows.IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest() # IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.update_cron_workflow(namespace, name, body) + print("The response of CronWorkflowServiceApi->update_cron_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling CronWorkflowServiceApi->update_cron_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| DEPRECATED: This field is ignored. | - **body** | [**IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest**](IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.md)| | + **namespace** | **str**| | + **name** | **str**| DEPRECATED: This field is ignored. 
| + **body** | [**IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest**](IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.md)| | ### Return type @@ -32957,7 +677,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/DownwardAPIProjection.md b/sdks/python/client/docs/DownwardAPIProjection.md index f58677f64e39..c503f2405e6a 100644 --- a/sdks/python/client/docs/DownwardAPIProjection.md +++ b/sdks/python/client/docs/DownwardAPIProjection.md @@ -3,11 +3,28 @@ Represents downward API info for projecting into a projected volume. Note that this is identical to a downwardAPI volume source without the default mode. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[DownwardAPIVolumeFile]**](DownwardAPIVolumeFile.md) | Items is a list of DownwardAPIVolume file | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**items** | [**List[DownwardAPIVolumeFile]**](DownwardAPIVolumeFile.md) | Items is a list of DownwardAPIVolume file | [optional] + +## Example + +```python +from argo_workflows.models.downward_api_projection import DownwardAPIProjection + +# TODO update the JSON string below +json = "{}" +# create an instance of DownwardAPIProjection from a JSON string +downward_api_projection_instance = DownwardAPIProjection.from_json(json) +# print the JSON string representation of the object +print(DownwardAPIProjection.to_json()) +# convert the object into a dict +downward_api_projection_dict = downward_api_projection_instance.to_dict() +# create an instance of DownwardAPIProjection from a dict +downward_api_projection_form_dict = downward_api_projection.from_dict(downward_api_projection_dict) +``` [[Back to 
Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/DownwardAPIVolumeFile.md b/sdks/python/client/docs/DownwardAPIVolumeFile.md index 9cfc4e483be2..fdf09b2d57af 100644 --- a/sdks/python/client/docs/DownwardAPIVolumeFile.md +++ b/sdks/python/client/docs/DownwardAPIVolumeFile.md @@ -3,14 +3,31 @@ DownwardAPIVolumeFile represents information to create the file containing the pod field ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**path** | **str** | Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..' | **field_ref** | [**ObjectFieldSelector**](ObjectFieldSelector.md) | | [optional] **mode** | **int** | Optional: mode bits used to set permissions on this file, must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. | [optional] +**path** | **str** | Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..' 
| **resource_field_ref** | [**ResourceFieldSelector**](ResourceFieldSelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.downward_api_volume_file import DownwardAPIVolumeFile + +# TODO update the JSON string below +json = "{}" +# create an instance of DownwardAPIVolumeFile from a JSON string +downward_api_volume_file_instance = DownwardAPIVolumeFile.from_json(json) +# print the JSON string representation of the object +print(DownwardAPIVolumeFile.to_json()) + +# convert the object into a dict +downward_api_volume_file_dict = downward_api_volume_file_instance.to_dict() +# create an instance of DownwardAPIVolumeFile from a dict +downward_api_volume_file_form_dict = downward_api_volume_file.from_dict(downward_api_volume_file_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/DownwardAPIVolumeSource.md b/sdks/python/client/docs/DownwardAPIVolumeSource.md index 9e4053f6bc6e..3e5d6fce6cd0 100644 --- a/sdks/python/client/docs/DownwardAPIVolumeSource.md +++ b/sdks/python/client/docs/DownwardAPIVolumeSource.md @@ -3,12 +3,29 @@ DownwardAPIVolumeSource represents a volume containing downward API info. Downward API volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **default_mode** | **int** | Optional: mode bits to use on created files by default. Must be a Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. 
Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. | [optional] -**items** | [**[DownwardAPIVolumeFile]**](DownwardAPIVolumeFile.md) | Items is a list of downward API volume file | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**items** | [**List[DownwardAPIVolumeFile]**](DownwardAPIVolumeFile.md) | Items is a list of downward API volume file | [optional] + +## Example + +```python +from argo_workflows.models.downward_api_volume_source import DownwardAPIVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of DownwardAPIVolumeSource from a JSON string +downward_api_volume_source_instance = DownwardAPIVolumeSource.from_json(json) +# print the JSON string representation of the object +print(DownwardAPIVolumeSource.to_json()) +# convert the object into a dict +downward_api_volume_source_dict = downward_api_volume_source_instance.to_dict() +# create an instance of DownwardAPIVolumeSource from a dict +downward_api_volume_source_form_dict = downward_api_volume_source.from_dict(downward_api_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Duration.md b/sdks/python/client/docs/Duration.md index 599dd08ebc7f..3585fad418d5 100644 --- a/sdks/python/client/docs/Duration.md +++ b/sdks/python/client/docs/Duration.md @@ -3,11 +3,28 @@ Duration is a wrapper around time.Duration which supports correct marshaling to YAML and JSON. In particular, it marshals into strings, which can be used as map keys in json. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **duration** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.duration import Duration + +# TODO update the JSON string below +json = "{}" +# create an instance of Duration from a JSON string +duration_instance = Duration.from_json(json) +# print the JSON string representation of the object +print(Duration.to_json()) + +# convert the object into a dict +duration_dict = duration_instance.to_dict() +# create an instance of Duration from a dict +duration_form_dict = duration.from_dict(duration_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EmptyDirVolumeSource.md b/sdks/python/client/docs/EmptyDirVolumeSource.md index 345366e016d5..a243c16b41fa 100644 --- a/sdks/python/client/docs/EmptyDirVolumeSource.md +++ b/sdks/python/client/docs/EmptyDirVolumeSource.md @@ -3,12 +3,29 @@ Represents an empty directory for a pod. Empty directory volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **medium** | **str** | What type of storage medium should back this directory. The default is \"\" which means to use the node's default medium. Must be an empty string (default) or Memory. More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir | [optional] **size_limit** | **str** | Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. 
The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. | .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) 
This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.empty_dir_volume_source import EmptyDirVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of EmptyDirVolumeSource from a JSON string +empty_dir_volume_source_instance = EmptyDirVolumeSource.from_json(json) +# print the JSON string representation of the object +print(EmptyDirVolumeSource.to_json()) + +# convert the object into a dict +empty_dir_volume_source_dict = empty_dir_volume_source_instance.to_dict() +# create an instance of EmptyDirVolumeSource from a dict +empty_dir_volume_source_form_dict = empty_dir_volume_source.from_dict(empty_dir_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EnvFromSource.md b/sdks/python/client/docs/EnvFromSource.md index 04698c94068e..bceddc82dba6 100644 --- a/sdks/python/client/docs/EnvFromSource.md +++ b/sdks/python/client/docs/EnvFromSource.md @@ -3,13 +3,30 @@ EnvFromSource represents the source of a set of ConfigMaps ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map_ref** | [**ConfigMapEnvSource**](ConfigMapEnvSource.md) | | [optional] **prefix** | **str** | An optional identifier to prepend to each key in the ConfigMap. Must be a C_IDENTIFIER. 
| [optional] **secret_ref** | [**SecretEnvSource**](SecretEnvSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.env_from_source import EnvFromSource + +# TODO update the JSON string below +json = "{}" +# create an instance of EnvFromSource from a JSON string +env_from_source_instance = EnvFromSource.from_json(json) +# print the JSON string representation of the object +print(EnvFromSource.to_json()) + +# convert the object into a dict +env_from_source_dict = env_from_source_instance.to_dict() +# create an instance of EnvFromSource from a dict +env_from_source_form_dict = env_from_source.from_dict(env_from_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EnvVar.md b/sdks/python/client/docs/EnvVar.md index 2124f220ea66..8ee37420b7f8 100644 --- a/sdks/python/client/docs/EnvVar.md +++ b/sdks/python/client/docs/EnvVar.md @@ -3,13 +3,30 @@ EnvVar represents an environment variable present in a Container. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of the environment variable. Must be a C_IDENTIFIER. | **value** | **str** | Variable references $(VAR_NAME) are expanded using the previously defined environment variables in the container and any service environment variables. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to \"\". 
| [optional] **value_from** | [**EnvVarSource**](EnvVarSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.env_var import EnvVar + +# TODO update the JSON string below +json = "{}" +# create an instance of EnvVar from a JSON string +env_var_instance = EnvVar.from_json(json) +# print the JSON string representation of the object +print(EnvVar.to_json()) + +# convert the object into a dict +env_var_dict = env_var_instance.to_dict() +# create an instance of EnvVar from a dict +env_var_form_dict = env_var.from_dict(env_var_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EnvVarSource.md b/sdks/python/client/docs/EnvVarSource.md index 2dc152ee053a..c6515065df80 100644 --- a/sdks/python/client/docs/EnvVarSource.md +++ b/sdks/python/client/docs/EnvVarSource.md @@ -3,14 +3,31 @@ EnvVarSource represents a source for the value of an EnvVar. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map_key_ref** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] **field_ref** | [**ObjectFieldSelector**](ObjectFieldSelector.md) | | [optional] **resource_field_ref** | [**ResourceFieldSelector**](ResourceFieldSelector.md) | | [optional] **secret_key_ref** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.env_var_source import EnvVarSource + +# TODO update the JSON string below +json = "{}" +# create an instance of EnvVarSource from a JSON string +env_var_source_instance = EnvVarSource.from_json(json) +# print the JSON string representation of the object +print(EnvVarSource.to_json()) + +# convert the object into a dict +env_var_source_dict = env_var_source_instance.to_dict() +# create an instance of EnvVarSource from a dict +env_var_source_form_dict = env_var_source.from_dict(env_var_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EphemeralVolumeSource.md b/sdks/python/client/docs/EphemeralVolumeSource.md index 617c94a8e39e..f45236295d50 100644 --- a/sdks/python/client/docs/EphemeralVolumeSource.md +++ b/sdks/python/client/docs/EphemeralVolumeSource.md @@ -3,11 +3,28 @@ Represents an ephemeral volume that is handled by a normal storage driver. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **volume_claim_template** | [**PersistentVolumeClaimTemplate**](PersistentVolumeClaimTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.ephemeral_volume_source import EphemeralVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of EphemeralVolumeSource from a JSON string +ephemeral_volume_source_instance = EphemeralVolumeSource.from_json(json) +# print the JSON string representation of the object +print(EphemeralVolumeSource.to_json()) + +# convert the object into a dict +ephemeral_volume_source_dict = ephemeral_volume_source_instance.to_dict() +# create an instance of EphemeralVolumeSource from a dict +ephemeral_volume_source_form_dict = ephemeral_volume_source.from_dict(ephemeral_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Event.md b/sdks/python/client/docs/Event.md index 86daa40cf31a..b1f6f268652b 100644 --- a/sdks/python/client/docs/Event.md +++ b/sdks/python/client/docs/Event.md @@ -3,18 +3,19 @@ Event is a report of an event somewhere in the cluster. Events have a limited retention time and triggers and messages may evolve with time. Event consumers should not rely on the timing of an event with a given Reason reflecting a consistent underlying trigger, or the continued existence of events with that Reason. Events should be treated as informative, best-effort, supplemental data. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**involved_object** | [**ObjectReference**](ObjectReference.md) | | -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | **action** | **str** | What action was taken/failed regarding to the Regarding object. | [optional] **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] **count** | **int** | The number of times this event has occurred. | [optional] **event_time** | **datetime** | MicroTime is version of Time with microsecond level precision. | [optional] **first_timestamp** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] +**involved_object** | [**ObjectReference**](ObjectReference.md) | | **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] **last_timestamp** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **message** | **str** | A human-readable description of the status of this operation. | [optional] +**metadata** | [**ObjectMeta**](ObjectMeta.md) | | **reason** | **str** | This should be a short, machine understandable string that gives the reason for the transition into the object's current status. 
| [optional] **related** | [**ObjectReference**](ObjectReference.md) | | [optional] **reporting_component** | **str** | Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`. | [optional] @@ -22,8 +23,24 @@ Name | Type | Description | Notes **series** | [**EventSeries**](EventSeries.md) | | [optional] **source** | [**EventSource**](EventSource.md) | | [optional] **type** | **str** | Type of this event (Normal, Warning), new types could be added in the future | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.event import Event + +# TODO update the JSON string below +json = "{}" +# create an instance of Event from a JSON string +event_instance = Event.from_json(json) +# print the JSON string representation of the object +print(Event.to_json()) + +# convert the object into a dict +event_dict = event_instance.to_dict() +# create an instance of Event from a dict +event_form_dict = event.from_dict(event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EventSeries.md b/sdks/python/client/docs/EventSeries.md index 334b5cc3f5b7..822505e31ca3 100644 --- a/sdks/python/client/docs/EventSeries.md +++ b/sdks/python/client/docs/EventSeries.md @@ -3,12 +3,29 @@ EventSeries contain information on series of events, i.e. thing that was/is happening continuously for some time. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **count** | **int** | Number of occurrences in this series up to the last heartbeat time | [optional] **last_observed_time** | **datetime** | MicroTime is version of Time with microsecond level precision. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.event_series import EventSeries + +# TODO update the JSON string below +json = "{}" +# create an instance of EventSeries from a JSON string +event_series_instance = EventSeries.from_json(json) +# print the JSON string representation of the object +print(EventSeries.to_json()) + +# convert the object into a dict +event_series_dict = event_series_instance.to_dict() +# create an instance of EventSeries from a dict +event_series_form_dict = event_series.from_dict(event_series_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EventServiceApi.md b/sdks/python/client/docs/EventServiceApi.md index f013152208e0..f96a2f98027d 100644 --- a/sdks/python/client/docs/EventServiceApi.md +++ b/sdks/python/client/docs/EventServiceApi.md @@ -9,7 +9,7 @@ Method | HTTP request | Description # **list_workflow_event_bindings** -> IoArgoprojWorkflowV1alpha1WorkflowEventBindingList list_workflow_event_bindings(namespace) +> IoArgoprojWorkflowV1alpha1WorkflowEventBindingList list_workflow_event_bindings(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -18,12 +18,11 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): 
```python -import time import argo_workflows -from argo_workflows.api import event_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -36,7 +35,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -44,49 +43,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_service_api.EventServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
(optional) + api_instance = argo_workflows.EventServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. 
This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.list_workflow_event_bindings(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling EventServiceApi->list_workflow_event_bindings: %s\n" % e) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_workflow_event_bindings(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of EventServiceApi->list_workflow_event_bindings:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventServiceApi->list_workflow_event_bindings: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. 
If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
| [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] ### Return type @@ -101,7 +94,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -112,7 +104,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **receive_event** -> bool, date, datetime, dict, float, int, list, str, none_type receive_event(namespace, discriminator, body) +> object receive_event(namespace, discriminator, body) @@ -121,11 +113,10 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -138,7 +129,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. 
# Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -146,31 +137,33 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_service_api.EventServiceApi(api_client) - namespace = "namespace_example" # str | The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. - discriminator = "discriminator_example" # str | Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` - body = {} # bool, date, datetime, dict, float, int, list, str, none_type | The event itself can be any data. + api_instance = argo_workflows.EventServiceApi(api_client) + namespace = 'namespace_example' # str | The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. + discriminator = 'discriminator_example' # str | Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. 
Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` + body = None # object | The event itself can be any data. - # example passing only required values which don't have defaults set try: api_response = api_instance.receive_event(namespace, discriminator, body) + print("The response of EventServiceApi->receive_event:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventServiceApi->receive_event: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. | - **discriminator** | **str**| Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` | - **body** | **bool, date, datetime, dict, float, int, list, str, none_type**| The event itself can be any data. | + **namespace** | **str**| The namespace for the io.argoproj.workflow.v1alpha1. This can be empty if the client has cluster scoped permissions. 
If empty, then the event is \"broadcast\" to workflow event binding in all namespaces. | + **discriminator** | **str**| Optional discriminator for the io.argoproj.workflow.v1alpha1. This should almost always be empty. Used for edge-cases where the event payload alone is not provide enough information to discriminate the event. This MUST NOT be used as security mechanism, e.g. to allow two clients to use the same access token, or to support webhooks on unsecured server. Instead, use access tokens. This is made available as `discriminator` in the event binding selector (`/spec/event/selector)` | + **body** | **object**| The event itself can be any data. | ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -181,7 +174,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/EventSource.md b/sdks/python/client/docs/EventSource.md index 82620b1cf011..0519218734c3 100644 --- a/sdks/python/client/docs/EventSource.md +++ b/sdks/python/client/docs/EventSource.md @@ -3,12 +3,29 @@ EventSource contains information for an event. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **component** | **str** | Component from which the event is generated. | [optional] **host** | **str** | Node name on which the event is generated. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.event_source import EventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of EventSource from a JSON string +event_source_instance = EventSource.from_json(json) +# print the JSON string representation of the object +print(event_source_instance.to_json()) + +# convert the object into a dict +event_source_dict = event_source_instance.to_dict() +# create an instance of EventSource from a dict +event_source_form_dict = EventSource.from_dict(event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EventSourceServiceApi.md b/sdks/python/client/docs/EventSourceServiceApi.md index 208e197b29ac..194cc44ded07 100644 --- a/sdks/python/client/docs/EventSourceServiceApi.md +++ b/sdks/python/client/docs/EventSourceServiceApi.md @@ -23,13 +23,12 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_source_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource -from argo_workflows.model.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest +from argo_workflows.models.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to 
http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -42,7 +41,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -50,2446 +49,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_source_service_api.EventSourceServiceApi(api_client) - namespace = "namespace_example" # str | - body = EventsourceCreateEventSourceRequest( - event_source=IoArgoprojEventsV1alpha1EventSource( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - 
self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojEventsV1alpha1EventSourceSpec( - amqp={ - "key": IoArgoprojEventsV1alpha1AMQPEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - consume=IoArgoprojEventsV1alpha1AMQPConsumeConfig( - auto_ack=True, - consumer_tag="consumer_tag_example", - exclusive=True, - no_local=True, - no_wait=True, - ), - exchange_declare=IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig( - auto_delete=True, - durable=True, - internal=True, - no_wait=True, - ), - exchange_name="exchange_name_example", - exchange_type="exchange_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - queue_bind=IoArgoprojEventsV1alpha1AMQPQueueBindConfig( - no_wait=True, - ), - queue_declare=IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig( - arguments="arguments_example", - auto_delete=True, - durable=True, - exclusive=True, - name="name_example", - no_wait=True, - ), - routing_key="routing_key_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", 
- url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - azure_events_hub={ - "key": IoArgoprojEventsV1alpha1AzureEventsHubEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - fqdn="fqdn_example", - hub_name="hub_name_example", - metadata={ - "key": "key_example", - }, - shared_access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - shared_access_key_name=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - azure_queue_storage={ - "key": IoArgoprojEventsV1alpha1AzureQueueStorageEventSource( - connection_string=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - decode_message=True, - dlq=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - queue_name="queue_name_example", - storage_account_name="storage_account_name_example", - wait_time_in_seconds=1, - ), - }, - azure_service_bus={ - "key": IoArgoprojEventsV1alpha1AzureServiceBusEventSource( - connection_string=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - fully_qualified_namespace="fully_qualified_namespace_example", - json_body=True, - metadata={ - "key": "key_example", - }, - queue_name="queue_name_example", - subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - 
topic_name="topic_name_example", - ), - }, - bitbucket={ - "key": IoArgoprojEventsV1alpha1BitbucketEventSource( - auth=IoArgoprojEventsV1alpha1BitbucketAuth( - basic=IoArgoprojEventsV1alpha1BitbucketBasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - owner="owner_example", - project_key="project_key_example", - repositories=[ - IoArgoprojEventsV1alpha1BitbucketRepository( - owner="owner_example", - repository_slug="repository_slug_example", - ), - ], - repository_slug="repository_slug_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - bitbucketserver={ - "key": IoArgoprojEventsV1alpha1BitbucketServerEventSource( - access_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bitbucketserver_base_url="bitbucketserver_base_url_example", - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - 
project_key="project_key_example", - repositories=[ - IoArgoprojEventsV1alpha1BitbucketServerRepository( - project_key="project_key_example", - repository_slug="repository_slug_example", - ), - ], - repository_slug="repository_slug_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - webhook_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - calendar={ - "key": IoArgoprojEventsV1alpha1CalendarEventSource( - exclusion_dates=[ - "exclusion_dates_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - interval="interval_example", - metadata={ - "key": "key_example", - }, - persistence=IoArgoprojEventsV1alpha1EventPersistence( - catchup=IoArgoprojEventsV1alpha1CatchupConfiguration( - enabled=True, - max_duration="max_duration_example", - ), - config_map=IoArgoprojEventsV1alpha1ConfigMapPersistence( - create_if_not_exist=True, - name="name_example", - ), - ), - schedule="schedule_example", - timezone="timezone_example", - ), - }, - emitter={ - "key": 
IoArgoprojEventsV1alpha1EmitterEventSource( - broker="broker_example", - channel_key="channel_key_example", - channel_name="channel_name_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - event_bus_name="event_bus_name_example", - file={ - "key": IoArgoprojEventsV1alpha1FileEventSource( - event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - polling=True, - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( - directory="directory_example", - path="path_example", - path_regexp="path_regexp_example", - ), - ), - }, - generic={ - "key": IoArgoprojEventsV1alpha1GenericEventSource( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - config="config_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - insecure=True, - json_body=True, - 
metadata={ - "key": "key_example", - }, - url="url_example", - ), - }, - gerrit={ - "key": IoArgoprojEventsV1alpha1GerritEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - gerrit_base_url="gerrit_base_url_example", - hook_name="hook_name_example", - metadata={ - "key": "key_example", - }, - projects=[ - "projects_example", - ], - ssl_verify=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - github={ - "key": IoArgoprojEventsV1alpha1GithubEventSource( - active=True, - api_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - content_type="content_type_example", - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - github_app=IoArgoprojEventsV1alpha1GithubAppCreds( - app_id="app_id_example", - installation_id="installation_id_example", - private_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - github_base_url="github_base_url_example", - github_upload_url="github_upload_url_example", - id="id_example", - 
insecure=True, - metadata={ - "key": "key_example", - }, - organizations=[ - "organizations_example", - ], - owner="owner_example", - repositories=[ - IoArgoprojEventsV1alpha1OwnedRepositories( - names=[ - "names_example", - ], - owner="owner_example", - ), - ], - repository="repository_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - webhook_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - gitlab={ - "key": IoArgoprojEventsV1alpha1GitlabEventSource( - access_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - delete_hook_on_finish=True, - enable_ssl_verification=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - gitlab_base_url="gitlab_base_url_example", - groups=[ - "groups_example", - ], - metadata={ - "key": "key_example", - }, - project_id="project_id_example", - projects=[ - "projects_example", - ], - secret_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - hdfs={ - "key": IoArgoprojEventsV1alpha1HDFSEventSource( - addresses=[ - "addresses_example", - ], - check_interval="check_interval_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - metadata={ - "key": "key_example", - }, - type="type_example", - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( - directory="directory_example", - path="path_example", - path_regexp="path_regexp_example", - ), - ), - }, - kafka={ - "key": IoArgoprojEventsV1alpha1KafkaEventSource( - config="config_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - consumer_group=IoArgoprojEventsV1alpha1KafkaConsumerGroup( - group_name="group_name_example", - oldest=True, - rebalance_strategy="rebalance_strategy_example", - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - limit_events_per_second="limit_events_per_second_example", - metadata={ - "key": "key_example", - }, - partition="partition_example", - 
sasl=IoArgoprojEventsV1alpha1SASLConfig( - mechanism="mechanism_example", - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - user_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - url="url_example", - version="version_example", - ), - }, - minio={ - "key": IoArgoprojEventsV1alpha1S3Artifact( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( - key="key_example", - name="name_example", - ), - ca_certificate=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1S3Filter( - prefix="prefix_example", - suffix="suffix_example", - ), - insecure=True, - metadata={ - "key": "key_example", - }, - region="region_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - mqtt={ - "key": IoArgoprojEventsV1alpha1MQTTEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_id="client_id_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - 
factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - url="url_example", - ), - }, - nats={ - "key": IoArgoprojEventsV1alpha1NATSEventsSource( - auth=IoArgoprojEventsV1alpha1NATSAuth( - basic=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - credential=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - nkey=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", 
- optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", - ), - }, - nsq={ - "key": IoArgoprojEventsV1alpha1NSQEventSource( - channel="channel_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - host_address="host_address_example", - json_body=True, - metadata={ - "key": "key_example", - }, - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - ), - }, - pub_sub={ - "key": IoArgoprojEventsV1alpha1PubSubEventSource( - credential_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - delete_subscription_on_finish=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - project_id="project_id_example", - subscription_id="subscription_id_example", - topic="topic_example", - topic_project_id="topic_project_id_example", - ), - }, - pulsar={ - "key": IoArgoprojEventsV1alpha1PulsarEventSource( - auth_athenz_params={ - "key": "key_example", - }, - 
auth_athenz_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - auth_token_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - tls_allow_insecure_connection=True, - tls_trust_certs_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls_validate_hostname=True, - topics=[ - "topics_example", - ], - type="type_example", - url="url_example", - ), - }, - redis={ - "key": IoArgoprojEventsV1alpha1RedisEventSource( - channels=[ - "channels_example", - ], - db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - host_address="host_address_example", - json_body=True, - metadata={ - "key": "key_example", - }, - namespace="namespace_example", - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - username="username_example", - ), - }, - redis_stream={ - "key": IoArgoprojEventsV1alpha1RedisStreamEventSource( - consumer_group="consumer_group_example", - db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - host_address="host_address_example", - max_msg_count_per_read=1, - metadata={ - "key": "key_example", - }, - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - streams=[ - "streams_example", - ], - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - username="username_example", - ), - }, - replicas=1, - resource={ - "key": IoArgoprojEventsV1alpha1ResourceEventSource( - event_types=[ - "event_types_example", - ], - filter=IoArgoprojEventsV1alpha1ResourceFilter( - after_start=True, - created_by=dateutil_parser('1970-01-01T00:00:00.00Z'), - fields=[ - IoArgoprojEventsV1alpha1Selector( - key="key_example", - operation="operation_example", - value="value_example", - ), - ], - labels=[ - IoArgoprojEventsV1alpha1Selector( - key="key_example", - operation="operation_example", - value="value_example", - ), - ], - prefix="prefix_example", - ), - group_version_resource=GroupVersionResource( - group="group_example", - resource="resource_example", - version="version_example", - ), - metadata={ - "key": "key_example", - }, - namespace="namespace_example", - ), - }, - service=IoArgoprojEventsV1alpha1Service( - cluster_ip="cluster_ip_example", - ports=[ - ServicePort( - 
app_protocol="app_protocol_example", - name="name_example", - node_port=1, - port=1, - protocol="SCTP", - target_port="target_port_example", - ), - ], - ), - sftp={ - "key": IoArgoprojEventsV1alpha1SFTPEventSource( - address=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - poll_interval_duration="poll_interval_duration_example", - ssh_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( - directory="directory_example", - path="path_example", - path_regexp="path_regexp_example", - ), - ), - }, - slack={ - "key": IoArgoprojEventsV1alpha1SlackEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - signing_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - sns={ - "key": 
IoArgoprojEventsV1alpha1SNSEventSource( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - region="region_example", - role_arn="role_arn_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - topic_arn="topic_arn_example", - validate_signature=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - sqs={ - "key": IoArgoprojEventsV1alpha1SQSEventSource( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - dlq=True, - endpoint="endpoint_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - queue="queue_example", - queue_account_id="queue_account_id_example", - region="region_example", - role_arn="role_arn_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - session_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - wait_time_seconds="wait_time_seconds_example", - ), - }, - storage_grid={ - "key": IoArgoprojEventsV1alpha1StorageGridEventSource( - api_url="api_url_example", - auth_token=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1StorageGridFilter( - prefix="prefix_example", - suffix="suffix_example", - ), - metadata={ - "key": "key_example", - }, - region="region_example", - topic_arn="topic_arn_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - stripe={ - "key": IoArgoprojEventsV1alpha1StripeEventSource( - api_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_webhook=True, - event_filter=[ - "event_filter_example", - ], - metadata={ - "key": "key_example", - }, - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - template=IoArgoprojEventsV1alpha1Template( - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - 
key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - 
topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - 
command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - 
), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metadata=IoArgoprojEventsV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - node_selector={ - "key": "key_example", - }, - priority=1, - priority_class_name="priority_class_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - 
aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - 
cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - 
flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - 
DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - 
fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - webhook={ - "key": IoArgoprojEventsV1alpha1WebhookEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - webhook_context=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - ), - status=IoArgoprojEventsV1alpha1EventSourceStatus( - status=IoArgoprojEventsV1alpha1Status( - conditions=[ - IoArgoprojEventsV1alpha1Condition( - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="type_example", - ), - ], - ), - ), - ), - namespace="namespace_example", - ) # EventsourceCreateEventSourceRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.EventSourceServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.EventsourceCreateEventSourceRequest() # EventsourceCreateEventSourceRequest | + try: api_response = api_instance.create_event_source(namespace, body) + print("The response of EventSourceServiceApi->create_event_source:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventSourceServiceApi->create_event_source: %s\n" % e) ``` + ### Parameters + Name | Type | Description | 
Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **body** | [**EventsourceCreateEventSourceRequest**](EventsourceCreateEventSourceRequest.md)| | + **namespace** | **str**| | + **body** | [**EventsourceCreateEventSourceRequest**](EventsourceCreateEventSourceRequest.md)| | ### Return type @@ -2504,7 +84,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2515,7 +94,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **delete_event_source** -> bool, date, datetime, dict, float, int, list, str, none_type delete_event_source(namespace, name) +> object delete_event_source(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) @@ -2524,11 +103,10 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_source_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
configuration = argo_workflows.Configuration( @@ -2541,7 +119,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2549,51 +127,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_source_service_api.EventSourceServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - delete_options_grace_period_seconds = "deleteOptions.gracePeriodSeconds_example" # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) - delete_options_preconditions_uid = "deleteOptions.preconditions.uid_example" # str | Specifies the target UID. +optional. (optional) - delete_options_preconditions_resource_version = "deleteOptions.preconditions.resourceVersion_example" # str | Specifies the target ResourceVersion +optional. (optional) + api_instance = argo_workflows.EventSourceServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + delete_options_grace_period_seconds = 'delete_options_grace_period_seconds_example' # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. 
Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) + delete_options_preconditions_uid = 'delete_options_preconditions_uid_example' # str | Specifies the target UID. +optional. (optional) + delete_options_preconditions_resource_version = 'delete_options_preconditions_resource_version_example' # str | Specifies the target ResourceVersion +optional. (optional) delete_options_orphan_dependents = True # bool | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. (optional) - delete_options_propagation_policy = "deleteOptions.propagationPolicy_example" # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) - delete_options_dry_run = [ - "deleteOptions.dryRun_example", - ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. (optional) + delete_options_propagation_policy = 'delete_options_propagation_policy_example' # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) + delete_options_dry_run = ['delete_options_dry_run_example'] # List[str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_event_source(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling EventSourceServiceApi->delete_event_source: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.delete_event_source(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + print("The response of EventSourceServiceApi->delete_event_source:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventSourceServiceApi->delete_event_source: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | 
- **name** | **str**| | - **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] - **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] - **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] - **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
| [optional] + **namespace** | **str**| | + **name** | **str**| | + **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] + **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] + **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] + **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] + **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] + **delete_options_dry_run** | [**List[str]**](str.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
| [optional] ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -2604,7 +174,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2615,7 +184,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **event_sources_logs** -> StreamResultOfEventsourceLogEntry event_sources_logs(namespace) +> StreamResultOfEventsourceLogEntry event_sources_logs(namespace, name=name, event_source_type=event_source_type, event_name=event_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend) @@ -2624,12 +193,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_source_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry +from argo_workflows.models.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry +from argo_workflows.rest import ApiException from pprint import pprint + # Defining 
the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2642,7 +210,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2650,59 +218,53 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_source_service_api.EventSourceServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | optional - only return entries for this event source. (optional) - event_source_type = "eventSourceType_example" # str | optional - only return entries for this event source type (e.g. `webhook`). (optional) - event_name = "eventName_example" # str | optional - only return entries for this event name (e.g. `example`). (optional) - grep = "grep_example" # str | optional - only return entries where `msg` matches this regular expression. (optional) - pod_log_options_container = "podLogOptions.container_example" # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. (optional) + api_instance = argo_workflows.EventSourceServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | optional - only return entries for this event source. (optional) + event_source_type = 'event_source_type_example' # str | optional - only return entries for this event source type (e.g. `webhook`). 
(optional) + event_name = 'event_name_example' # str | optional - only return entries for this event name (e.g. `example`). (optional) + grep = 'grep_example' # str | optional - only return entries where `msg` matches this regular expression. (optional) + pod_log_options_container = 'pod_log_options_container_example' # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. (optional) pod_log_options_follow = True # bool | Follow the log stream of the pod. Defaults to false. +optional. (optional) pod_log_options_previous = True # bool | Return previous terminated container logs. Defaults to false. +optional. (optional) - pod_log_options_since_seconds = "podLogOptions.sinceSeconds_example" # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. (optional) - pod_log_options_since_time_seconds = "podLogOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) - pod_log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) + pod_log_options_since_seconds = 'pod_log_options_since_seconds_example' # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. 
Only one of sinceSeconds or sinceTime may be specified. +optional. (optional) + pod_log_options_since_time_seconds = 'pod_log_options_since_time_seconds_example' # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) + pod_log_options_since_time_nanos = 56 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) pod_log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - pod_log_options_tail_lines = "podLogOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) - pod_log_options_limit_bytes = "podLogOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) + pod_log_options_tail_lines = 'pod_log_options_tail_lines_example' # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + pod_log_options_limit_bytes = 'pod_log_options_limit_bytes_example' # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
(optional) pod_log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.event_sources_logs(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling EventSourceServiceApi->event_sources_logs: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.event_sources_logs(namespace, name=name, event_source_type=event_source_type, event_name=event_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend) + print("The response of EventSourceServiceApi->event_sources_logs:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: 
print("Exception when calling EventSourceServiceApi->event_sources_logs: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| optional - only return entries for this event source. | [optional] - **event_source_type** | **str**| optional - only return entries for this event source type (e.g. `webhook`). | [optional] - **event_name** | **str**| optional - only return entries for this event name (e.g. `example`). | [optional] - **grep** | **str**| optional - only return entries where `msg` matches this regular expression. | [optional] - **pod_log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] - **pod_log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] - **pod_log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] - **pod_log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] - **pod_log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] - **pod_log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. 
| [optional] - **pod_log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] - **pod_log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] - **pod_log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **namespace** | **str**| | + **name** | **str**| optional - only return entries for this event source. | [optional] + **event_source_type** | **str**| optional - only return entries for this event source type (e.g. `webhook`). | [optional] + **event_name** | **str**| optional - only return entries for this event name (e.g. `example`). | [optional] + **grep** | **str**| optional - only return entries where `msg` matches this regular expression. | [optional] + **pod_log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. 
+optional. | [optional] + **pod_log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] + **pod_log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] + **pod_log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] + **pod_log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] + **pod_log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] + **pod_log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] + **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **pod_log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
| [optional] + **pod_log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] ### Return type @@ -2717,7 +279,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2737,12 +298,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_source_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2755,7 +315,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2763,25 +323,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_source_service_api.EventSourceServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | + api_instance = argo_workflows.EventSourceServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | - # example passing only required values which don't have defaults set try: api_response = api_instance.get_event_source(namespace, name) + print("The response of EventSourceServiceApi->get_event_source:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventSourceServiceApi->get_event_source: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | + **namespace** | **str**| | + **name** | **str**| | ### Return type @@ -2796,7 +358,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2807,7 +368,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_event_sources** -> IoArgoprojEventsV1alpha1EventSourceList list_event_sources(namespace) +> IoArgoprojEventsV1alpha1EventSourceList list_event_sources(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, 
list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -2816,12 +377,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_source_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2834,7 +394,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2842,49 +402,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_source_service_api.EventSourceServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.EventSourceServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
(optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.list_event_sources(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling EventSourceServiceApi->list_event_sources: %s\n" % e) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. 
This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_event_sources(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of EventSourceServiceApi->list_event_sources:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventSourceServiceApi->list_event_sources: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. 
If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
| [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] ### Return type @@ -2899,7 +453,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2919,13 +472,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_source_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource -from argo_workflows.model.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest +from argo_workflows.models.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2938,7 +490,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. 
# Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2946,2449 +498,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_source_service_api.EventSourceServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = EventsourceUpdateEventSourceRequest( - event_source=IoArgoprojEventsV1alpha1EventSource( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojEventsV1alpha1EventSourceSpec( - amqp={ - "key": IoArgoprojEventsV1alpha1AMQPEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( - 
password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - consume=IoArgoprojEventsV1alpha1AMQPConsumeConfig( - auto_ack=True, - consumer_tag="consumer_tag_example", - exclusive=True, - no_local=True, - no_wait=True, - ), - exchange_declare=IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig( - auto_delete=True, - durable=True, - internal=True, - no_wait=True, - ), - exchange_name="exchange_name_example", - exchange_type="exchange_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - queue_bind=IoArgoprojEventsV1alpha1AMQPQueueBindConfig( - no_wait=True, - ), - queue_declare=IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig( - arguments="arguments_example", - auto_delete=True, - durable=True, - exclusive=True, - name="name_example", - no_wait=True, - ), - routing_key="routing_key_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", - url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - azure_events_hub={ - "key": IoArgoprojEventsV1alpha1AzureEventsHubEventSource( - 
filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - fqdn="fqdn_example", - hub_name="hub_name_example", - metadata={ - "key": "key_example", - }, - shared_access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - shared_access_key_name=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - azure_queue_storage={ - "key": IoArgoprojEventsV1alpha1AzureQueueStorageEventSource( - connection_string=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - decode_message=True, - dlq=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - queue_name="queue_name_example", - storage_account_name="storage_account_name_example", - wait_time_in_seconds=1, - ), - }, - azure_service_bus={ - "key": IoArgoprojEventsV1alpha1AzureServiceBusEventSource( - connection_string=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - fully_qualified_namespace="fully_qualified_namespace_example", - json_body=True, - metadata={ - "key": "key_example", - }, - queue_name="queue_name_example", - subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic_name="topic_name_example", - ), - }, - bitbucket={ - "key": IoArgoprojEventsV1alpha1BitbucketEventSource( - auth=IoArgoprojEventsV1alpha1BitbucketAuth( - 
basic=IoArgoprojEventsV1alpha1BitbucketBasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - owner="owner_example", - project_key="project_key_example", - repositories=[ - IoArgoprojEventsV1alpha1BitbucketRepository( - owner="owner_example", - repository_slug="repository_slug_example", - ), - ], - repository_slug="repository_slug_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - bitbucketserver={ - "key": IoArgoprojEventsV1alpha1BitbucketServerEventSource( - access_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bitbucketserver_base_url="bitbucketserver_base_url_example", - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - project_key="project_key_example", - repositories=[ - IoArgoprojEventsV1alpha1BitbucketServerRepository( - project_key="project_key_example", - 
repository_slug="repository_slug_example", - ), - ], - repository_slug="repository_slug_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - webhook_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - calendar={ - "key": IoArgoprojEventsV1alpha1CalendarEventSource( - exclusion_dates=[ - "exclusion_dates_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - interval="interval_example", - metadata={ - "key": "key_example", - }, - persistence=IoArgoprojEventsV1alpha1EventPersistence( - catchup=IoArgoprojEventsV1alpha1CatchupConfiguration( - enabled=True, - max_duration="max_duration_example", - ), - config_map=IoArgoprojEventsV1alpha1ConfigMapPersistence( - create_if_not_exist=True, - name="name_example", - ), - ), - schedule="schedule_example", - timezone="timezone_example", - ), - }, - emitter={ - "key": IoArgoprojEventsV1alpha1EmitterEventSource( - broker="broker_example", - channel_key="channel_key_example", - channel_name="channel_name_example", - 
connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - event_bus_name="event_bus_name_example", - file={ - "key": IoArgoprojEventsV1alpha1FileEventSource( - event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - polling=True, - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( - directory="directory_example", - path="path_example", - path_regexp="path_regexp_example", - ), - ), - }, - generic={ - "key": IoArgoprojEventsV1alpha1GenericEventSource( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - config="config_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - insecure=True, - json_body=True, - metadata={ - "key": "key_example", - }, - url="url_example", - ), - }, - gerrit={ - "key": IoArgoprojEventsV1alpha1GerritEventSource( - 
auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - gerrit_base_url="gerrit_base_url_example", - hook_name="hook_name_example", - metadata={ - "key": "key_example", - }, - projects=[ - "projects_example", - ], - ssl_verify=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - github={ - "key": IoArgoprojEventsV1alpha1GithubEventSource( - active=True, - api_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - content_type="content_type_example", - delete_hook_on_finish=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - github_app=IoArgoprojEventsV1alpha1GithubAppCreds( - app_id="app_id_example", - installation_id="installation_id_example", - private_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - github_base_url="github_base_url_example", - github_upload_url="github_upload_url_example", - id="id_example", - insecure=True, - metadata={ - "key": "key_example", - }, - organizations=[ - "organizations_example", - ], - owner="owner_example", - 
repositories=[ - IoArgoprojEventsV1alpha1OwnedRepositories( - names=[ - "names_example", - ], - owner="owner_example", - ), - ], - repository="repository_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - webhook_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - gitlab={ - "key": IoArgoprojEventsV1alpha1GitlabEventSource( - access_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - delete_hook_on_finish=True, - enable_ssl_verification=True, - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - gitlab_base_url="gitlab_base_url_example", - groups=[ - "groups_example", - ], - metadata={ - "key": "key_example", - }, - project_id="project_id_example", - projects=[ - "projects_example", - ], - secret_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - url="url_example", - ), - ), - }, - hdfs={ - "key": IoArgoprojEventsV1alpha1HDFSEventSource( - addresses=[ - "addresses_example", - ], - check_interval="check_interval_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - metadata={ - "key": "key_example", - }, - type="type_example", - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( - directory="directory_example", - path="path_example", - path_regexp="path_regexp_example", - ), - ), - }, - kafka={ - "key": IoArgoprojEventsV1alpha1KafkaEventSource( - config="config_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - consumer_group=IoArgoprojEventsV1alpha1KafkaConsumerGroup( - group_name="group_name_example", - oldest=True, - rebalance_strategy="rebalance_strategy_example", - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - limit_events_per_second="limit_events_per_second_example", - metadata={ - "key": "key_example", - }, - partition="partition_example", - sasl=IoArgoprojEventsV1alpha1SASLConfig( - mechanism="mechanism_example", - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - user_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - url="url_example", - version="version_example", - ), - }, - minio={ - "key": IoArgoprojEventsV1alpha1S3Artifact( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( - key="key_example", - name="name_example", - ), - ca_certificate=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1S3Filter( - prefix="prefix_example", - suffix="suffix_example", - ), - insecure=True, - metadata={ - "key": "key_example", - }, - region="region_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - }, - mqtt={ - "key": IoArgoprojEventsV1alpha1MQTTEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_id="client_id_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - 
filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - url="url_example", - ), - }, - nats={ - "key": IoArgoprojEventsV1alpha1NATSEventsSource( - auth=IoArgoprojEventsV1alpha1NATSAuth( - basic=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - credential=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - nkey=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", - ), - }, - nsq={ - "key": IoArgoprojEventsV1alpha1NSQEventSource( - channel="channel_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - host_address="host_address_example", - json_body=True, - metadata={ - "key": "key_example", - }, - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - ), - }, - pub_sub={ - "key": IoArgoprojEventsV1alpha1PubSubEventSource( - credential_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - delete_subscription_on_finish=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - project_id="project_id_example", - subscription_id="subscription_id_example", - topic="topic_example", - topic_project_id="topic_project_id_example", - ), - }, - pulsar={ - "key": IoArgoprojEventsV1alpha1PulsarEventSource( - auth_athenz_params={ - "key": "key_example", - }, - auth_athenz_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - auth_token_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - tls_allow_insecure_connection=True, - tls_trust_certs_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls_validate_hostname=True, - topics=[ - "topics_example", - ], - type="type_example", - url="url_example", - ), - }, - redis={ - "key": IoArgoprojEventsV1alpha1RedisEventSource( - channels=[ - "channels_example", - ], - db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - host_address="host_address_example", - json_body=True, - metadata={ - "key": "key_example", - }, - namespace="namespace_example", - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
insecure_skip_verify=True, - ), - username="username_example", - ), - }, - redis_stream={ - "key": IoArgoprojEventsV1alpha1RedisStreamEventSource( - consumer_group="consumer_group_example", - db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - host_address="host_address_example", - max_msg_count_per_read=1, - metadata={ - "key": "key_example", - }, - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - streams=[ - "streams_example", - ], - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - username="username_example", - ), - }, - replicas=1, - resource={ - "key": IoArgoprojEventsV1alpha1ResourceEventSource( - event_types=[ - "event_types_example", - ], - filter=IoArgoprojEventsV1alpha1ResourceFilter( - after_start=True, - created_by=dateutil_parser('1970-01-01T00:00:00.00Z'), - fields=[ - IoArgoprojEventsV1alpha1Selector( - key="key_example", - operation="operation_example", - value="value_example", - ), - ], - labels=[ - IoArgoprojEventsV1alpha1Selector( - key="key_example", - operation="operation_example", - value="value_example", - ), - ], - prefix="prefix_example", - ), - group_version_resource=GroupVersionResource( - group="group_example", - resource="resource_example", - version="version_example", - ), - metadata={ - "key": "key_example", - }, - namespace="namespace_example", - ), - }, - service=IoArgoprojEventsV1alpha1Service( - cluster_ip="cluster_ip_example", - ports=[ - ServicePort( - app_protocol="app_protocol_example", - name="name_example", - node_port=1, - port=1, - protocol="SCTP", - target_port="target_port_example", - ), - ], - 
), - sftp={ - "key": IoArgoprojEventsV1alpha1SFTPEventSource( - address=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - poll_interval_duration="poll_interval_duration_example", - ssh_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( - directory="directory_example", - path="path_example", - path_regexp="path_regexp_example", - ), - ), - }, - slack={ - "key": IoArgoprojEventsV1alpha1SlackEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - signing_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - sns={ - "key": IoArgoprojEventsV1alpha1SNSEventSource( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - 
filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - metadata={ - "key": "key_example", - }, - region="region_example", - role_arn="role_arn_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - topic_arn="topic_arn_example", - validate_signature=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - sqs={ - "key": IoArgoprojEventsV1alpha1SQSEventSource( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - dlq=True, - endpoint="endpoint_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - json_body=True, - metadata={ - "key": "key_example", - }, - queue="queue_example", - queue_account_id="queue_account_id_example", - region="region_example", - role_arn="role_arn_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - session_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - wait_time_seconds="wait_time_seconds_example", - ), - }, - storage_grid={ - "key": IoArgoprojEventsV1alpha1StorageGridEventSource( - api_url="api_url_example", - auth_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1StorageGridFilter( - prefix="prefix_example", - 
suffix="suffix_example", - ), - metadata={ - "key": "key_example", - }, - region="region_example", - topic_arn="topic_arn_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - stripe={ - "key": IoArgoprojEventsV1alpha1StripeEventSource( - api_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_webhook=True, - event_filter=[ - "event_filter_example", - ], - metadata={ - "key": "key_example", - }, - webhook=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - template=IoArgoprojEventsV1alpha1Template( - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - 
"values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - 
label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, 
- ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - 
name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metadata=IoArgoprojEventsV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - node_selector={ - "key": "key_example", - }, - priority=1, - priority_class_name="priority_class_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - 
caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - 
deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - 
secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - 
path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - 
), - ), - ], - ), - webhook={ - "key": IoArgoprojEventsV1alpha1WebhookEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( - expression="expression_example", - ), - webhook_context=IoArgoprojEventsV1alpha1WebhookContext( - auth_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - max_payload_size="max_payload_size_example", - metadata={ - "key": "key_example", - }, - method="method_example", - port="port_example", - server_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - server_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - ), - ), - }, - ), - status=IoArgoprojEventsV1alpha1EventSourceStatus( - status=IoArgoprojEventsV1alpha1Status( - conditions=[ - IoArgoprojEventsV1alpha1Condition( - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="type_example", - ), - ], - ), - ), - ), - name="name_example", - namespace="namespace_example", - ) # EventsourceUpdateEventSourceRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.EventSourceServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.EventsourceUpdateEventSourceRequest() # EventsourceUpdateEventSourceRequest | + try: api_response = api_instance.update_event_source(namespace, name, body) + print("The response of EventSourceServiceApi->update_event_source:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventSourceServiceApi->update_event_source: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | 
**str**| | - **body** | [**EventsourceUpdateEventSourceRequest**](EventsourceUpdateEventSourceRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**EventsourceUpdateEventSourceRequest**](EventsourceUpdateEventSourceRequest.md)| | ### Return type @@ -5403,7 +535,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -5414,7 +545,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **watch_event_sources** -> StreamResultOfEventsourceEventSourceWatchEvent watch_event_sources(namespace) +> StreamResultOfEventsourceEventSourceWatchEvent watch_event_sources(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -5423,12 +554,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import event_source_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent +from argo_workflows.models.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent +from argo_workflows.rest import 
ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -5441,7 +571,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -5449,49 +579,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = event_source_service_api.EventSourceServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.EventSourceServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. 
Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.watch_event_sources(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling EventSourceServiceApi->watch_event_sources: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.watch_event_sources(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of EventSourceServiceApi->watch_event_sources:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling EventSourceServiceApi->watch_event_sources: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict 
the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. 
If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. 
If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. 
| [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] ### Return type @@ -5506,7 +630,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md b/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md index 630497a18728..8578bf32085e 100644 --- a/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md +++ b/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_source** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of EventsourceCreateEventSourceRequest from a JSON string +eventsource_create_event_source_request_instance = EventsourceCreateEventSourceRequest.from_json(json) +# print the JSON string representation of the object +print(EventsourceCreateEventSourceRequest.to_json()) + +# convert the object into a dict +eventsource_create_event_source_request_dict = eventsource_create_event_source_request_instance.to_dict() +# create an instance of EventsourceCreateEventSourceRequest from a dict +eventsource_create_event_source_request_form_dict = eventsource_create_event_source_request.from_dict(eventsource_create_event_source_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) diff --git a/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md b/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md index 3e1f9c6bbddf..7169c59a5467 100644 --- a/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md +++ b/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **object** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] **type** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.eventsource_event_source_watch_event import EventsourceEventSourceWatchEvent + +# TODO update the JSON string below +json = "{}" +# create an instance of EventsourceEventSourceWatchEvent from a JSON string +eventsource_event_source_watch_event_instance = EventsourceEventSourceWatchEvent.from_json(json) +# print the JSON string representation of the object +print(EventsourceEventSourceWatchEvent.to_json()) + +# convert the object into a dict +eventsource_event_source_watch_event_dict = eventsource_event_source_watch_event_instance.to_dict() +# create an instance of EventsourceEventSourceWatchEvent from a dict +eventsource_event_source_watch_event_form_dict = eventsource_event_source_watch_event.from_dict(eventsource_event_source_watch_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EventsourceLogEntry.md b/sdks/python/client/docs/EventsourceLogEntry.md index 8d569f5b0384..31c2eaf77a76 100644 --- a/sdks/python/client/docs/EventsourceLogEntry.md +++ 
b/sdks/python/client/docs/EventsourceLogEntry.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_name** | **str** | | [optional] @@ -11,8 +12,24 @@ Name | Type | Description | Notes **msg** | **str** | | [optional] **namespace** | **str** | | [optional] **time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.eventsource_log_entry import EventsourceLogEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of EventsourceLogEntry from a JSON string +eventsource_log_entry_instance = EventsourceLogEntry.from_json(json) +# print the JSON string representation of the object +print(EventsourceLogEntry.to_json()) + +# convert the object into a dict +eventsource_log_entry_dict = eventsource_log_entry_instance.to_dict() +# create an instance of EventsourceLogEntry from a dict +eventsource_log_entry_form_dict = eventsource_log_entry.from_dict(eventsource_log_entry_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md b/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md index b6fb65c1a577..c13dd6739d10 100644 --- a/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md +++ b/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_source** | 
[**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] **name** | **str** | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of EventsourceUpdateEventSourceRequest from a JSON string +eventsource_update_event_source_request_instance = EventsourceUpdateEventSourceRequest.from_json(json) +# print the JSON string representation of the object +print(EventsourceUpdateEventSourceRequest.to_json()) + +# convert the object into a dict +eventsource_update_event_source_request_dict = eventsource_update_event_source_request_instance.to_dict() +# create an instance of EventsourceUpdateEventSourceRequest from a dict +eventsource_update_event_source_request_form_dict = eventsource_update_event_source_request.from_dict(eventsource_update_event_source_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ExecAction.md b/sdks/python/client/docs/ExecAction.md index e0a9b9f865b3..3e9ba13564de 100644 --- a/sdks/python/client/docs/ExecAction.md +++ b/sdks/python/client/docs/ExecAction.md @@ -3,11 +3,28 @@ ExecAction describes a \"run in container\" action. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**command** | **[str]** | Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. 
The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**command** | **List[str]** | Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy. | [optional] + +## Example + +```python +from argo_workflows.models.exec_action import ExecAction + +# TODO update the JSON string below +json = "{}" +# create an instance of ExecAction from a JSON string +exec_action_instance = ExecAction.from_json(json) +# print the JSON string representation of the object +print(ExecAction.to_json()) +# convert the object into a dict +exec_action_dict = exec_action_instance.to_dict() +# create an instance of ExecAction from a dict +exec_action_form_dict = exec_action.from_dict(exec_action_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/FCVolumeSource.md b/sdks/python/client/docs/FCVolumeSource.md index 1e43db4e774d..0b225f00c429 100644 --- a/sdks/python/client/docs/FCVolumeSource.md +++ b/sdks/python/client/docs/FCVolumeSource.md @@ -3,15 +3,32 @@ Represents a Fibre Channel volume. Fibre Channel volumes can only be mounted as read/write once. Fibre Channel volumes support ownership management and SELinux relabeling. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. | [optional] **lun** | **int** | Optional: FC target lun number | [optional] **read_only** | **bool** | Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. | [optional] -**target_wwns** | **[str]** | Optional: FC target worldwide names (WWNs) | [optional] -**wwids** | **[str]** | Optional: FC volume world wide identifiers (wwids) Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**target_wwns** | **List[str]** | Optional: FC target worldwide names (WWNs) | [optional] +**wwids** | **List[str]** | Optional: FC volume world wide identifiers (wwids) Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously. 
| [optional] + +## Example + +```python +from argo_workflows.models.fc_volume_source import FCVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of FCVolumeSource from a JSON string +fc_volume_source_instance = FCVolumeSource.from_json(json) +# print the JSON string representation of the object +print(FCVolumeSource.to_json()) +# convert the object into a dict +fc_volume_source_dict = fc_volume_source_instance.to_dict() +# create an instance of FCVolumeSource from a dict +fc_volume_source_form_dict = fc_volume_source.from_dict(fc_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/FlexVolumeSource.md b/sdks/python/client/docs/FlexVolumeSource.md index 8d4b2213a2dd..6521477e37b6 100644 --- a/sdks/python/client/docs/FlexVolumeSource.md +++ b/sdks/python/client/docs/FlexVolumeSource.md @@ -3,15 +3,32 @@ FlexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **driver** | **str** | Driver is the name of the driver to use for this volume. | **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". The default filesystem depends on FlexVolume script. | [optional] -**options** | **{str: (str,)}** | Optional: Extra command options if any. | [optional] +**options** | **Dict[str, str]** | Optional: Extra command options if any. | [optional] **read_only** | **bool** | Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
| [optional] **secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.flex_volume_source import FlexVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of FlexVolumeSource from a JSON string +flex_volume_source_instance = FlexVolumeSource.from_json(json) +# print the JSON string representation of the object +print(FlexVolumeSource.to_json()) + +# convert the object into a dict +flex_volume_source_dict = flex_volume_source_instance.to_dict() +# create an instance of FlexVolumeSource from a dict +flex_volume_source_form_dict = flex_volume_source.from_dict(flex_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/FlockerVolumeSource.md b/sdks/python/client/docs/FlockerVolumeSource.md index 8b8e02f2ab87..39c1eeff8c04 100644 --- a/sdks/python/client/docs/FlockerVolumeSource.md +++ b/sdks/python/client/docs/FlockerVolumeSource.md @@ -3,12 +3,29 @@ Represents a Flocker volume mounted by the Flocker agent. One and only one of datasetName and datasetUUID should be set. Flocker volumes do not support ownership management or SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **dataset_name** | **str** | Name of the dataset stored as metadata -> name on the dataset for Flocker should be considered as deprecated | [optional] **dataset_uuid** | **str** | UUID of the dataset. 
This is unique identifier of a Flocker dataset | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.flocker_volume_source import FlockerVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of FlockerVolumeSource from a JSON string +flocker_volume_source_instance = FlockerVolumeSource.from_json(json) +# print the JSON string representation of the object +print(FlockerVolumeSource.to_json()) + +# convert the object into a dict +flocker_volume_source_dict = flocker_volume_source_instance.to_dict() +# create an instance of FlockerVolumeSource from a dict +flocker_volume_source_form_dict = flocker_volume_source.from_dict(flocker_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GCEPersistentDiskVolumeSource.md b/sdks/python/client/docs/GCEPersistentDiskVolumeSource.md index 4799162ea133..46feea102792 100644 --- a/sdks/python/client/docs/GCEPersistentDiskVolumeSource.md +++ b/sdks/python/client/docs/GCEPersistentDiskVolumeSource.md @@ -3,14 +3,31 @@ Represents a Persistent Disk resource in Google Compute Engine. A GCE PD must exist before mounting to a container. The disk must also be in the same GCE project and zone as the kubelet. A GCE PD can only be mounted as read/write once or read-only many times. GCE PDs support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**pd_name** | **str** | Unique name of the PD resource in GCE. Used to identify the disk in GCE. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk | **fs_type** | **str** | Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk | [optional] **partition** | **int** | The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty). More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk | [optional] +**pd_name** | **str** | Unique name of the PD resource in GCE. Used to identify the disk in GCE. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk | **read_only** | **bool** | ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of GCEPersistentDiskVolumeSource from a JSON string +gce_persistent_disk_volume_source_instance = GCEPersistentDiskVolumeSource.from_json(json) +# print the JSON string representation of the object +print(GCEPersistentDiskVolumeSource.to_json()) + +# convert the object into a dict +gce_persistent_disk_volume_source_dict = gce_persistent_disk_volume_source_instance.to_dict() +# create an instance of GCEPersistentDiskVolumeSource from a dict +gce_persistent_disk_volume_source_form_dict = gce_persistent_disk_volume_source.from_dict(gce_persistent_disk_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GRPCAction.md b/sdks/python/client/docs/GRPCAction.md index 306d6237cc05..6ae69d9a921f 100644 --- a/sdks/python/client/docs/GRPCAction.md +++ b/sdks/python/client/docs/GRPCAction.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **port** | **int** | Port number of the gRPC service. Number must be in the range 1 to 65535. | **service** | **str** | Service is the name of the service to place in the gRPC HealthCheckRequest (see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). If this is not specified, the default behavior is defined by gRPC. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.grpc_action import GRPCAction + +# TODO update the JSON string below +json = "{}" +# create an instance of GRPCAction from a JSON string +grpc_action_instance = GRPCAction.from_json(json) +# print the JSON string representation of the object +print(GRPCAction.to_json()) + +# convert the object into a dict +grpc_action_dict = grpc_action_instance.to_dict() +# create an instance of GRPCAction from a dict +grpc_action_form_dict = grpc_action.from_dict(grpc_action_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GitRepoVolumeSource.md b/sdks/python/client/docs/GitRepoVolumeSource.md index 4574d2b428d2..9fe5fb813c36 100644 --- a/sdks/python/client/docs/GitRepoVolumeSource.md +++ b/sdks/python/client/docs/GitRepoVolumeSource.md @@ -3,13 +3,30 @@ Represents a volume that is populated with the contents of a git repository. Git repo volumes do not support ownership management. Git repo volumes support SELinux relabeling. DEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**repository** | **str** | Repository URL | **directory** | **str** | Target directory name. Must not contain or start with '..'. If '.' is supplied, the volume directory will be the git repository. Otherwise, if specified, the volume will contain the git repository in the subdirectory with the given name. 
| [optional] +**repository** | **str** | Repository URL | **revision** | **str** | Commit hash for the specified revision. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.git_repo_volume_source import GitRepoVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of GitRepoVolumeSource from a JSON string +git_repo_volume_source_instance = GitRepoVolumeSource.from_json(json) +# print the JSON string representation of the object +print(GitRepoVolumeSource.to_json()) + +# convert the object into a dict +git_repo_volume_source_dict = git_repo_volume_source_instance.to_dict() +# create an instance of GitRepoVolumeSource from a dict +git_repo_volume_source_form_dict = git_repo_volume_source.from_dict(git_repo_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GlusterfsVolumeSource.md b/sdks/python/client/docs/GlusterfsVolumeSource.md index 6b41f3f47039..2b6c1243c167 100644 --- a/sdks/python/client/docs/GlusterfsVolumeSource.md +++ b/sdks/python/client/docs/GlusterfsVolumeSource.md @@ -3,13 +3,30 @@ Represents a Glusterfs mount that lasts the lifetime of a pod. Glusterfs volumes do not support ownership management or SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **endpoints** | **str** | EndpointsName is the endpoint name that details Glusterfs topology. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod | **path** | **str** | Path is the Glusterfs volume path. 
More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod | **read_only** | **bool** | ReadOnly here will force the Glusterfs volume to be mounted with read-only permissions. Defaults to false. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.glusterfs_volume_source import GlusterfsVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of GlusterfsVolumeSource from a JSON string +glusterfs_volume_source_instance = GlusterfsVolumeSource.from_json(json) +# print the JSON string representation of the object +print(GlusterfsVolumeSource.to_json()) + +# convert the object into a dict +glusterfs_volume_source_dict = glusterfs_volume_source_instance.to_dict() +# create an instance of GlusterfsVolumeSource from a dict +glusterfs_volume_source_form_dict = glusterfs_volume_source.from_dict(glusterfs_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GoogleProtobufAny.md b/sdks/python/client/docs/GoogleProtobufAny.md index 5e3c3fb4b436..642169e8ce52 100644 --- a/sdks/python/client/docs/GoogleProtobufAny.md +++ b/sdks/python/client/docs/GoogleProtobufAny.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **type_url** | **str** | | [optional] -**value** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**value** | **bytearray** | | [optional] + +## Example + +```python +from 
argo_workflows.models.google_protobuf_any import GoogleProtobufAny + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleProtobufAny from a JSON string +google_protobuf_any_instance = GoogleProtobufAny.from_json(json) +# print the JSON string representation of the object +print(GoogleProtobufAny.to_json()) +# convert the object into a dict +google_protobuf_any_dict = google_protobuf_any_instance.to_dict() +# create an instance of GoogleProtobufAny from a dict +google_protobuf_any_form_dict = google_protobuf_any.from_dict(google_protobuf_any_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GroupVersionResource.md b/sdks/python/client/docs/GroupVersionResource.md index 4139c8a7aaba..3ba1c334132f 100644 --- a/sdks/python/client/docs/GroupVersionResource.md +++ b/sdks/python/client/docs/GroupVersionResource.md @@ -3,13 +3,30 @@ +protobuf.options.(gogoproto.goproto_stringer)=false ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **group** | **str** | | [optional] **resource** | **str** | | [optional] **version** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.group_version_resource import GroupVersionResource + +# TODO update the JSON string below +json = "{}" +# create an instance of GroupVersionResource from a JSON string +group_version_resource_instance = GroupVersionResource.from_json(json) +# print the JSON string representation of the object +print(GroupVersionResource.to_json()) + +# convert the object into a dict +group_version_resource_dict = group_version_resource_instance.to_dict() +# create an instance of 
GroupVersionResource from a dict +group_version_resource_form_dict = group_version_resource.from_dict(group_version_resource_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GrpcGatewayRuntimeError.md b/sdks/python/client/docs/GrpcGatewayRuntimeError.md index 05524aff81ec..99b9428ab3ba 100644 --- a/sdks/python/client/docs/GrpcGatewayRuntimeError.md +++ b/sdks/python/client/docs/GrpcGatewayRuntimeError.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **code** | **int** | | [optional] -**details** | [**[GoogleProtobufAny]**](GoogleProtobufAny.md) | | [optional] +**details** | [**List[GoogleProtobufAny]**](GoogleProtobufAny.md) | | [optional] **error** | **str** | | [optional] **message** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.grpc_gateway_runtime_error import GrpcGatewayRuntimeError + +# TODO update the JSON string below +json = "{}" +# create an instance of GrpcGatewayRuntimeError from a JSON string +grpc_gateway_runtime_error_instance = GrpcGatewayRuntimeError.from_json(json) +# print the JSON string representation of the object +print(GrpcGatewayRuntimeError.to_json()) + +# convert the object into a dict +grpc_gateway_runtime_error_dict = grpc_gateway_runtime_error_instance.to_dict() +# create an instance of GrpcGatewayRuntimeError from a dict +grpc_gateway_runtime_error_form_dict = grpc_gateway_runtime_error.from_dict(grpc_gateway_runtime_error_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff 
--git a/sdks/python/client/docs/GrpcGatewayRuntimeStreamError.md b/sdks/python/client/docs/GrpcGatewayRuntimeStreamError.md index f3293f1669cf..9b806de1024c 100644 --- a/sdks/python/client/docs/GrpcGatewayRuntimeStreamError.md +++ b/sdks/python/client/docs/GrpcGatewayRuntimeStreamError.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**details** | [**[GoogleProtobufAny]**](GoogleProtobufAny.md) | | [optional] +**details** | [**List[GoogleProtobufAny]**](GoogleProtobufAny.md) | | [optional] **grpc_code** | **int** | | [optional] **http_code** | **int** | | [optional] **http_status** | **str** | | [optional] **message** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.grpc_gateway_runtime_stream_error import GrpcGatewayRuntimeStreamError + +# TODO update the JSON string below +json = "{}" +# create an instance of GrpcGatewayRuntimeStreamError from a JSON string +grpc_gateway_runtime_stream_error_instance = GrpcGatewayRuntimeStreamError.from_json(json) +# print the JSON string representation of the object +print(GrpcGatewayRuntimeStreamError.to_json()) + +# convert the object into a dict +grpc_gateway_runtime_stream_error_dict = grpc_gateway_runtime_stream_error_instance.to_dict() +# create an instance of GrpcGatewayRuntimeStreamError from a dict +grpc_gateway_runtime_stream_error_form_dict = grpc_gateway_runtime_stream_error.from_dict(grpc_gateway_runtime_stream_error_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/HTTPGetAction.md b/sdks/python/client/docs/HTTPGetAction.md index cbf0219232f2..043eca014a4f 100644 --- 
a/sdks/python/client/docs/HTTPGetAction.md +++ b/sdks/python/client/docs/HTTPGetAction.md @@ -3,15 +3,32 @@ HTTPGetAction describes an action based on HTTP Get requests. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**port** | **str** | | **host** | **str** | Host name to connect to, defaults to the pod IP. You probably want to set \"Host\" in httpHeaders instead. | [optional] -**http_headers** | [**[HTTPHeader]**](HTTPHeader.md) | Custom headers to set in the request. HTTP allows repeated headers. | [optional] +**http_headers** | [**List[HTTPHeader]**](HTTPHeader.md) | Custom headers to set in the request. HTTP allows repeated headers. | [optional] **path** | **str** | Path to access on the HTTP server. | [optional] +**port** | **str** | | **scheme** | **str** | Scheme to use for connecting to the host. Defaults to HTTP. Possible enum values: - `\"HTTP\"` means that the scheme used will be http:// - `\"HTTPS\"` means that the scheme used will be https:// | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.http_get_action import HTTPGetAction + +# TODO update the JSON string below +json = "{}" +# create an instance of HTTPGetAction from a JSON string +http_get_action_instance = HTTPGetAction.from_json(json) +# print the JSON string representation of the object +print(HTTPGetAction.to_json()) + +# convert the object into a dict +http_get_action_dict = http_get_action_instance.to_dict() +# create an instance of HTTPGetAction from a dict +http_get_action_form_dict = http_get_action.from_dict(http_get_action_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/HTTPHeader.md b/sdks/python/client/docs/HTTPHeader.md index c25100fd0554..9549200d703c 100644 --- a/sdks/python/client/docs/HTTPHeader.md +++ b/sdks/python/client/docs/HTTPHeader.md @@ -3,12 +3,29 @@ HTTPHeader describes a custom header to be used in HTTP probes ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | The header field name | **value** | **str** | The header field value | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.http_header import HTTPHeader + +# TODO update the JSON string below +json = "{}" +# create an instance of HTTPHeader from a JSON string +http_header_instance = HTTPHeader.from_json(json) +# print the JSON string representation of the object +print(HTTPHeader.to_json()) + +# convert the object into a dict +http_header_dict = http_header_instance.to_dict() +# create an instance of HTTPHeader from a dict +http_header_form_dict = http_header.from_dict(http_header_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/HostAlias.md b/sdks/python/client/docs/HostAlias.md index 9cf5e9ea7c87..831f036f1e3c 100644 --- a/sdks/python/client/docs/HostAlias.md +++ b/sdks/python/client/docs/HostAlias.md @@ -3,12 +3,29 @@ HostAlias holds the mapping between IP and hostnames that will be injected as an entry in the pod's hosts file. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**hostnames** | **[str]** | Hostnames for the above IP address. | [optional] +**hostnames** | **List[str]** | Hostnames for the above IP address. 
| [optional] **ip** | **str** | IP address of the host file entry. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.host_alias import HostAlias + +# TODO update the JSON string below +json = "{}" +# create an instance of HostAlias from a JSON string +host_alias_instance = HostAlias.from_json(json) +# print the JSON string representation of the object +print(HostAlias.to_json()) + +# convert the object into a dict +host_alias_dict = host_alias_instance.to_dict() +# create an instance of HostAlias from a dict +host_alias_form_dict = host_alias.from_dict(host_alias_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/HostPathVolumeSource.md b/sdks/python/client/docs/HostPathVolumeSource.md index faf829cccfd7..8b63bb703d00 100644 --- a/sdks/python/client/docs/HostPathVolumeSource.md +++ b/sdks/python/client/docs/HostPathVolumeSource.md @@ -3,12 +3,29 @@ Represents a host path mapped into a pod. Host path volumes do not support ownership management or SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **path** | **str** | Path of the directory on the host. If the path is a symlink, it will follow the link to the real path. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath | **type** | **str** | Type for HostPath Volume Defaults to \"\" More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.host_path_volume_source import HostPathVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of HostPathVolumeSource from a JSON string +host_path_volume_source_instance = HostPathVolumeSource.from_json(json) +# print the JSON string representation of the object +print(HostPathVolumeSource.to_json()) + +# convert the object into a dict +host_path_volume_source_dict = host_path_volume_source_instance.to_dict() +# create an instance of HostPathVolumeSource from a dict +host_path_volume_source_form_dict = host_path_volume_source.from_dict(host_path_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ISCSIVolumeSource.md b/sdks/python/client/docs/ISCSIVolumeSource.md index a3102c67c3d4..203897d55377 100644 --- a/sdks/python/client/docs/ISCSIVolumeSource.md +++ b/sdks/python/client/docs/ISCSIVolumeSource.md @@ -3,21 +3,38 @@ Represents an ISCSI disk. ISCSI volumes can only be mounted as read/write once. ISCSI volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**iqn** | **str** | Target iSCSI Qualified Name. | -**lun** | **int** | iSCSI Target Lun number. | -**target_portal** | **str** | iSCSI Target Portal. 
The Portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). | **chap_auth_discovery** | **bool** | whether support iSCSI Discovery CHAP authentication | [optional] **chap_auth_session** | **bool** | whether support iSCSI Session CHAP authentication | [optional] **fs_type** | **str** | Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi | [optional] **initiator_name** | **str** | Custom iSCSI Initiator Name. If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface <target portal>:<volume name> will be created for the connection. | [optional] +**iqn** | **str** | Target iSCSI Qualified Name. | **iscsi_interface** | **str** | iSCSI Interface Name that uses an iSCSI transport. Defaults to 'default' (tcp). | [optional] -**portals** | **[str]** | iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). | [optional] +**lun** | **int** | iSCSI Target Lun number. | +**portals** | **List[str]** | iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). | [optional] **read_only** | **bool** | ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. | [optional] **secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**target_portal** | **str** | iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port is other than default (typically TCP ports 860 and 3260). 
| + +## Example + +```python +from argo_workflows.models.iscsi_volume_source import ISCSIVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of ISCSIVolumeSource from a JSON string +iscsi_volume_source_instance = ISCSIVolumeSource.from_json(json) +# print the JSON string representation of the object +print(ISCSIVolumeSource.to_json()) +# convert the object into a dict +iscsi_volume_source_dict = iscsi_volume_source_instance.to_dict() +# create an instance of ISCSIVolumeSource from a dict +iscsi_volume_source_form_dict = iscsi_volume_source.from_dict(iscsi_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/InfoServiceApi.md b/sdks/python/client/docs/InfoServiceApi.md index 6d61c4cf5707..b653be8e35f4 100644 --- a/sdks/python/client/docs/InfoServiceApi.md +++ b/sdks/python/client/docs/InfoServiceApi.md @@ -11,7 +11,7 @@ Method | HTTP request | Description # **collect_event** -> bool, date, datetime, dict, float, int, list, str, none_type collect_event(body) +> object collect_event(body) @@ -20,12 +20,11 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import info_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
configuration = argo_workflows.Configuration( @@ -38,7 +37,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -46,29 +45,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = info_service_api.InfoServiceApi(api_client) - body = IoArgoprojWorkflowV1alpha1CollectEventRequest( - name="name_example", - ) # IoArgoprojWorkflowV1alpha1CollectEventRequest | + api_instance = argo_workflows.InfoServiceApi(api_client) + body = argo_workflows.IoArgoprojWorkflowV1alpha1CollectEventRequest() # IoArgoprojWorkflowV1alpha1CollectEventRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.collect_event(body) + print("The response of InfoServiceApi->collect_event:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling InfoServiceApi->collect_event: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**IoArgoprojWorkflowV1alpha1CollectEventRequest**](IoArgoprojWorkflowV1alpha1CollectEventRequest.md)| | + **body** | [**IoArgoprojWorkflowV1alpha1CollectEventRequest**](IoArgoprojWorkflowV1alpha1CollectEventRequest.md)| | ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -79,7 +78,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | 
Status code | Description | Response headers | @@ -99,12 +97,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import info_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -117,7 +114,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -125,18 +122,20 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = info_service_api.InfoServiceApi(api_client) + api_instance = argo_workflows.InfoServiceApi(api_client) - # example, this endpoint has no required or optional parameters try: api_response = api_instance.get_info() + print("The response of InfoServiceApi->get_info:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling InfoServiceApi->get_info: %s\n" % e) ``` + ### Parameters + This endpoint does not need any parameter. 
### Return type @@ -152,7 +151,6 @@ This endpoint does not need any parameter. - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -172,12 +170,11 @@ This endpoint does not need any parameter. * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import info_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse +from argo_workflows.models.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -190,7 +187,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -198,18 +195,20 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = info_service_api.InfoServiceApi(api_client) + api_instance = argo_workflows.InfoServiceApi(api_client) - # example, this endpoint has no required or optional parameters try: api_response = api_instance.get_user_info() + print("The response of InfoServiceApi->get_user_info:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling InfoServiceApi->get_user_info: %s\n" % e) ``` + ### Parameters + This endpoint does not need any parameter. ### Return type @@ -225,7 +224,6 @@ This endpoint does not need any parameter. - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -245,12 +243,11 @@ This endpoint does not need any parameter. * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import info_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version +from argo_workflows.models.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -263,7 +260,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. 
# Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -271,18 +268,20 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = info_service_api.InfoServiceApi(api_client) + api_instance = argo_workflows.InfoServiceApi(api_client) - # example, this endpoint has no required or optional parameters try: api_response = api_instance.get_version() + print("The response of InfoServiceApi->get_version:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling InfoServiceApi->get_version: %s\n" % e) ``` + ### Parameters + This endpoint does not need any parameter. ### Return type @@ -298,7 +297,6 @@ This endpoint does not need any parameter. 
- **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md index 409b019423d0..78ad3a4375f0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auto_ack** | **bool** | | [optional] @@ -9,8 +10,24 @@ Name | Type | Description | Notes **exclusive** | **bool** | | [optional] **no_local** | **bool** | | [optional] **no_wait** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_consume_config import IoArgoprojEventsV1alpha1AMQPConsumeConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AMQPConsumeConfig from a JSON string +io_argoproj_events_v1alpha1_amqp_consume_config_instance = IoArgoprojEventsV1alpha1AMQPConsumeConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AMQPConsumeConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_amqp_consume_config_dict = io_argoproj_events_v1alpha1_amqp_consume_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AMQPConsumeConfig from a dict +io_argoproj_events_v1alpha1_amqp_consume_config_form_dict = io_argoproj_events_v1alpha1_amqp_consume_config.from_dict(io_argoproj_events_v1alpha1_amqp_consume_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md index 3a2a7b9c774b..9b5207031c23 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] @@ -12,15 +13,31 @@ Name | Type | Description | Notes **exchange_type** | **str** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **queue_bind** | [**IoArgoprojEventsV1alpha1AMQPQueueBindConfig**](IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md) | | [optional] **queue_declare** | [**IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig**](IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md) | | [optional] **routing_key** | **str** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **url** | **str** | | [optional] **url_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AMQPEventSource from a JSON string 
+io_argoproj_events_v1alpha1_amqp_event_source_instance = IoArgoprojEventsV1alpha1AMQPEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AMQPEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_amqp_event_source_dict = io_argoproj_events_v1alpha1_amqp_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AMQPEventSource from a dict +io_argoproj_events_v1alpha1_amqp_event_source_form_dict = io_argoproj_events_v1alpha1_amqp_event_source.from_dict(io_argoproj_events_v1alpha1_amqp_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md index f3bf375aa3a2..249bfedecab4 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auto_delete** | **bool** | | [optional] **durable** | **bool** | | [optional] **internal** | **bool** | | [optional] **no_wait** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig from a JSON string 
+io_argoproj_events_v1alpha1_amqp_exchange_declare_config_instance = IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_amqp_exchange_declare_config_dict = io_argoproj_events_v1alpha1_amqp_exchange_declare_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig from a dict +io_argoproj_events_v1alpha1_amqp_exchange_declare_config_form_dict = io_argoproj_events_v1alpha1_amqp_exchange_declare_config.from_dict(io_argoproj_events_v1alpha1_amqp_exchange_declare_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md index 4a2a26f3a896..595ffc08ae25 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **no_wait** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_bind_config import IoArgoprojEventsV1alpha1AMQPQueueBindConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AMQPQueueBindConfig from a JSON string +io_argoproj_events_v1alpha1_amqp_queue_bind_config_instance = IoArgoprojEventsV1alpha1AMQPQueueBindConfig.from_json(json) +# 
print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AMQPQueueBindConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_amqp_queue_bind_config_dict = io_argoproj_events_v1alpha1_amqp_queue_bind_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AMQPQueueBindConfig from a dict +io_argoproj_events_v1alpha1_amqp_queue_bind_config_form_dict = io_argoproj_events_v1alpha1_amqp_queue_bind_config.from_dict(io_argoproj_events_v1alpha1_amqp_queue_bind_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md index 7ebd05a6b10e..f35053fe469c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **arguments** | **str** | | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **exclusive** | **bool** | | [optional] **name** | **str** | | [optional] **no_wait** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig from a JSON string +io_argoproj_events_v1alpha1_amqp_queue_declare_config_instance = 
IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_amqp_queue_declare_config_dict = io_argoproj_events_v1alpha1_amqp_queue_declare_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig from a dict +io_argoproj_events_v1alpha1_amqp_queue_declare_config_form_dict = io_argoproj_events_v1alpha1_amqp_queue_declare_config.from_dict(io_argoproj_events_v1alpha1_amqp_queue_declare_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md index 2e45fe3532ca..8aee2f146f30 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md @@ -2,18 +2,35 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **function_name** | **str** | FunctionName refers to the name of the function to invoke. | [optional] **invocation_type** | **str** | Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. 
* DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. +optional | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **region** | **str** | | [optional] **role_arn** | **str** | | [optional] **secret_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AWSLambdaTrigger from a JSON string +io_argoproj_events_v1alpha1_aws_lambda_trigger_instance = IoArgoprojEventsV1alpha1AWSLambdaTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AWSLambdaTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_aws_lambda_trigger_dict = io_argoproj_events_v1alpha1_aws_lambda_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AWSLambdaTrigger from a dict +io_argoproj_events_v1alpha1_aws_lambda_trigger_form_dict = 
io_argoproj_events_v1alpha1_aws_lambda_trigger.from_dict(io_argoproj_events_v1alpha1_aws_lambda_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Amount.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Amount.md index 22f4df50a6bc..adad4eceabd0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Amount.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Amount.md @@ -3,11 +3,28 @@ Amount represent a numeric amount. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**value** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**value** | **bytearray** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Amount from a JSON string +io_argoproj_events_v1alpha1_amount_instance = IoArgoprojEventsV1alpha1Amount.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Amount.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_amount_dict = io_argoproj_events_v1alpha1_amount_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Amount from a dict +io_argoproj_events_v1alpha1_amount_form_dict = io_argoproj_events_v1alpha1_amount.from_dict(io_argoproj_events_v1alpha1_amount_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md index 521eff0b0d48..3a75f473055d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**args** | **[str]** | | [optional] +**args** | **List[str]** | | [optional] **operation** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] **source** | [**IoArgoprojEventsV1alpha1ArtifactLocation**](IoArgoprojEventsV1alpha1ArtifactLocation.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ArgoWorkflowTrigger from a JSON string +io_argoproj_events_v1alpha1_argo_workflow_trigger_instance = IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_argo_workflow_trigger_dict = io_argoproj_events_v1alpha1_argo_workflow_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ArgoWorkflowTrigger from a dict +io_argoproj_events_v1alpha1_argo_workflow_trigger_form_dict = 
io_argoproj_events_v1alpha1_argo_workflow_trigger.from_dict(io_argoproj_events_v1alpha1_argo_workflow_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md index dfdff21498b5..32739042dd6f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **configmap** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] @@ -11,8 +12,24 @@ Name | Type | Description | Notes **resource** | [**IoArgoprojEventsV1alpha1Resource**](IoArgoprojEventsV1alpha1Resource.md) | | [optional] **s3** | [**IoArgoprojEventsV1alpha1S3Artifact**](IoArgoprojEventsV1alpha1S3Artifact.md) | | [optional] **url** | [**IoArgoprojEventsV1alpha1URLArtifact**](IoArgoprojEventsV1alpha1URLArtifact.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ArtifactLocation from a JSON string +io_argoproj_events_v1alpha1_artifact_location_instance = IoArgoprojEventsV1alpha1ArtifactLocation.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ArtifactLocation.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_artifact_location_dict = 
io_argoproj_events_v1alpha1_artifact_location_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ArtifactLocation from a dict +io_argoproj_events_v1alpha1_artifact_location_form_dict = io_argoproj_events_v1alpha1_artifact_location.from_dict(io_argoproj_events_v1alpha1_artifact_location_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md index e974a38ad480..b4152ddb7758 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md @@ -2,16 +2,33 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **fqdn** | **str** | | [optional] **hub_name** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] **shared_access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **shared_access_key_name** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AzureEventHubsTrigger from a JSON string +io_argoproj_events_v1alpha1_azure_event_hubs_trigger_instance = IoArgoprojEventsV1alpha1AzureEventHubsTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AzureEventHubsTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_azure_event_hubs_trigger_dict = io_argoproj_events_v1alpha1_azure_event_hubs_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AzureEventHubsTrigger from a dict +io_argoproj_events_v1alpha1_azure_event_hubs_trigger_form_dict = io_argoproj_events_v1alpha1_azure_event_hubs_trigger.from_dict(io_argoproj_events_v1alpha1_azure_event_hubs_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md index 56bb6b723a1a..ae95c904c5ad 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md @@ -2,16 +2,33 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- 
| ------------- **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **fqdn** | **str** | | [optional] **hub_name** | **str** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **shared_access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **shared_access_key_name** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_events_hub_event_source import IoArgoprojEventsV1alpha1AzureEventsHubEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AzureEventsHubEventSource from a JSON string +io_argoproj_events_v1alpha1_azure_events_hub_event_source_instance = IoArgoprojEventsV1alpha1AzureEventsHubEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AzureEventsHubEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_azure_events_hub_event_source_dict = io_argoproj_events_v1alpha1_azure_events_hub_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AzureEventsHubEventSource from a dict +io_argoproj_events_v1alpha1_azure_events_hub_event_source_form_dict = io_argoproj_events_v1alpha1_azure_events_hub_event_source.from_dict(io_argoproj_events_v1alpha1_azure_events_hub_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md index 28b04f2849d0..6fb69362fd6c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **connection_string** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -9,12 +10,28 @@ Name | Type | Description | Notes **dlq** | **bool** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **queue_name** | **str** | | [optional] **storage_account_name** | **str** | | [optional] **wait_time_in_seconds** | **int** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AzureQueueStorageEventSource from a JSON string +io_argoproj_events_v1alpha1_azure_queue_storage_event_source_instance = IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_azure_queue_storage_event_source_dict = io_argoproj_events_v1alpha1_azure_queue_storage_event_source_instance.to_dict() +# create an instance of 
IoArgoprojEventsV1alpha1AzureQueueStorageEventSource from a dict +io_argoproj_events_v1alpha1_azure_queue_storage_event_source_form_dict = io_argoproj_events_v1alpha1_azure_queue_storage_event_source.from_dict(io_argoproj_events_v1alpha1_azure_queue_storage_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md index feb5ea655d5a..5e92cfb62687 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md @@ -2,19 +2,36 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **connection_string** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **fully_qualified_namespace** | **str** | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **queue_name** | **str** | | [optional] **subscription_name** | **str** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **topic_name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of 
IoArgoprojEventsV1alpha1AzureServiceBusEventSource from a JSON string +io_argoproj_events_v1alpha1_azure_service_bus_event_source_instance = IoArgoprojEventsV1alpha1AzureServiceBusEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AzureServiceBusEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_azure_service_bus_event_source_dict = io_argoproj_events_v1alpha1_azure_service_bus_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AzureServiceBusEventSource from a dict +io_argoproj_events_v1alpha1_azure_service_bus_event_source_form_dict = io_argoproj_events_v1alpha1_azure_service_bus_event_source.from_dict(io_argoproj_events_v1alpha1_azure_service_bus_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md index 3ace72346914..c046a86a94e5 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md @@ -2,17 +2,34 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **connection_string** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **queue_name** | **str** | | [optional] **subscription_name** | **str** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **topic_name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1AzureServiceBusTrigger from a JSON string +io_argoproj_events_v1alpha1_azure_service_bus_trigger_instance = IoArgoprojEventsV1alpha1AzureServiceBusTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1AzureServiceBusTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_azure_service_bus_trigger_dict = io_argoproj_events_v1alpha1_azure_service_bus_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1AzureServiceBusTrigger from a dict +io_argoproj_events_v1alpha1_azure_service_bus_trigger_form_dict = io_argoproj_events_v1alpha1_azure_service_bus_trigger.from_dict(io_argoproj_events_v1alpha1_azure_service_bus_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Backoff.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Backoff.md index e164eab042bf..cc08f8099c7e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Backoff.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Backoff.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **duration** | [**IoArgoprojEventsV1alpha1Int64OrString**](IoArgoprojEventsV1alpha1Int64OrString.md) | | [optional] **factor** | [**IoArgoprojEventsV1alpha1Amount**](IoArgoprojEventsV1alpha1Amount.md) | | [optional] **jitter** | [**IoArgoprojEventsV1alpha1Amount**](IoArgoprojEventsV1alpha1Amount.md) | | [optional] **steps** | **int** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Backoff from a JSON string +io_argoproj_events_v1alpha1_backoff_instance = IoArgoprojEventsV1alpha1Backoff.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Backoff.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_backoff_dict = io_argoproj_events_v1alpha1_backoff_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Backoff from a dict +io_argoproj_events_v1alpha1_backoff_form_dict = io_argoproj_events_v1alpha1_backoff.from_dict(io_argoproj_events_v1alpha1_backoff_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md index 5bbabe5c2265..7cfe72237d30 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1BasicAuth from a JSON string +io_argoproj_events_v1alpha1_basic_auth_instance = IoArgoprojEventsV1alpha1BasicAuth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1BasicAuth.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_basic_auth_dict = io_argoproj_events_v1alpha1_basic_auth_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1BasicAuth from a dict +io_argoproj_events_v1alpha1_basic_auth_form_dict = io_argoproj_events_v1alpha1_basic_auth.from_dict(io_argoproj_events_v1alpha1_basic_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md index ae1fa15c976a..a08a82fedcf9 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md +++ 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **basic** | [**IoArgoprojEventsV1alpha1BitbucketBasicAuth**](IoArgoprojEventsV1alpha1BitbucketBasicAuth.md) | | [optional] **oauth_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1BitbucketAuth from a JSON string +io_argoproj_events_v1alpha1_bitbucket_auth_instance = IoArgoprojEventsV1alpha1BitbucketAuth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1BitbucketAuth.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_bitbucket_auth_dict = io_argoproj_events_v1alpha1_bitbucket_auth_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1BitbucketAuth from a dict +io_argoproj_events_v1alpha1_bitbucket_auth_form_dict = io_argoproj_events_v1alpha1_bitbucket_auth.from_dict(io_argoproj_events_v1alpha1_bitbucket_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md index 40784673659c..c9d3657210fc 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md @@ -2,12 +2,29 @@ ## Properties + Name | 
Type | Description | Notes ------------ | ------------- | ------------- | ------------- **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1BitbucketBasicAuth from a JSON string +io_argoproj_events_v1alpha1_bitbucket_basic_auth_instance = IoArgoprojEventsV1alpha1BitbucketBasicAuth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1BitbucketBasicAuth.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_bitbucket_basic_auth_dict = io_argoproj_events_v1alpha1_bitbucket_basic_auth_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1BitbucketBasicAuth from a dict +io_argoproj_events_v1alpha1_bitbucket_basic_auth_form_dict = io_argoproj_events_v1alpha1_bitbucket_basic_auth.from_dict(io_argoproj_events_v1alpha1_bitbucket_basic_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md index dc850e57011b..f2fa7a0a98bc 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md @@ -2,20 +2,37 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | 
------------- **auth** | [**IoArgoprojEventsV1alpha1BitbucketAuth**](IoArgoprojEventsV1alpha1BitbucketAuth.md) | | [optional] **delete_hook_on_finish** | **bool** | | [optional] -**events** | **[str]** | Events this webhook is subscribed to. | [optional] +**events** | **List[str]** | Events this webhook is subscribed to. | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **owner** | **str** | | [optional] **project_key** | **str** | | [optional] -**repositories** | [**[IoArgoprojEventsV1alpha1BitbucketRepository]**](IoArgoprojEventsV1alpha1BitbucketRepository.md) | | [optional] +**repositories** | [**List[IoArgoprojEventsV1alpha1BitbucketRepository]**](IoArgoprojEventsV1alpha1BitbucketRepository.md) | | [optional] **repository_slug** | **str** | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_event_source import IoArgoprojEventsV1alpha1BitbucketEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1BitbucketEventSource from a JSON string +io_argoproj_events_v1alpha1_bitbucket_event_source_instance = IoArgoprojEventsV1alpha1BitbucketEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1BitbucketEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_bitbucket_event_source_dict = io_argoproj_events_v1alpha1_bitbucket_event_source_instance.to_dict() +# create an instance of 
IoArgoprojEventsV1alpha1BitbucketEventSource from a dict +io_argoproj_events_v1alpha1_bitbucket_event_source_form_dict = io_argoproj_events_v1alpha1_bitbucket_event_source.from_dict(io_argoproj_events_v1alpha1_bitbucket_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md index 29cfa7e94f4c..4249690ad57d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **owner** | **str** | | [optional] **repository_slug** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1BitbucketRepository from a JSON string +io_argoproj_events_v1alpha1_bitbucket_repository_instance = IoArgoprojEventsV1alpha1BitbucketRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1BitbucketRepository.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_bitbucket_repository_dict = io_argoproj_events_v1alpha1_bitbucket_repository_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1BitbucketRepository from a dict +io_argoproj_events_v1alpha1_bitbucket_repository_form_dict = 
io_argoproj_events_v1alpha1_bitbucket_repository.from_dict(io_argoproj_events_v1alpha1_bitbucket_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md index 53eedc2725d6..4c522d85c269 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md @@ -2,22 +2,39 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **bitbucketserver_base_url** | **str** | | [optional] **delete_hook_on_finish** | **bool** | | [optional] -**events** | **[str]** | | [optional] +**events** | **List[str]** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **project_key** | **str** | | [optional] -**repositories** | [**[IoArgoprojEventsV1alpha1BitbucketServerRepository]**](IoArgoprojEventsV1alpha1BitbucketServerRepository.md) | | [optional] +**repositories** | [**List[IoArgoprojEventsV1alpha1BitbucketServerRepository]**](IoArgoprojEventsV1alpha1BitbucketServerRepository.md) | | [optional] **repository_slug** | **str** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] **webhook_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, 
date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1BitbucketServerEventSource from a JSON string +io_argoproj_events_v1alpha1_bitbucket_server_event_source_instance = IoArgoprojEventsV1alpha1BitbucketServerEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1BitbucketServerEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_bitbucket_server_event_source_dict = io_argoproj_events_v1alpha1_bitbucket_server_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1BitbucketServerEventSource from a dict +io_argoproj_events_v1alpha1_bitbucket_server_event_source_form_dict = io_argoproj_events_v1alpha1_bitbucket_server_event_source.from_dict(io_argoproj_events_v1alpha1_bitbucket_server_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md index 86ab4e4b55cf..d720c4c859a2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **project_key** | **str** | | [optional] **repository_slug** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, 
float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1BitbucketServerRepository from a JSON string +io_argoproj_events_v1alpha1_bitbucket_server_repository_instance = IoArgoprojEventsV1alpha1BitbucketServerRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1BitbucketServerRepository.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_bitbucket_server_repository_dict = io_argoproj_events_v1alpha1_bitbucket_server_repository_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1BitbucketServerRepository from a dict +io_argoproj_events_v1alpha1_bitbucket_server_repository_form_dict = io_argoproj_events_v1alpha1_bitbucket_server_repository.from_dict(io_argoproj_events_v1alpha1_bitbucket_server_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md index 866b69ee7106..3e6c256b24be 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md @@ -2,17 +2,34 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**exclusion_dates** | **[str]** | ExclusionDates defines the list of DATE-TIME exceptions for recurring events. 
| [optional] +**exclusion_dates** | **List[str]** | ExclusionDates defines the list of DATE-TIME exceptions for recurring events. | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **interval** | **str** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **persistence** | [**IoArgoprojEventsV1alpha1EventPersistence**](IoArgoprojEventsV1alpha1EventPersistence.md) | | [optional] **schedule** | **str** | | [optional] **timezone** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1CalendarEventSource from a JSON string +io_argoproj_events_v1alpha1_calendar_event_source_instance = IoArgoprojEventsV1alpha1CalendarEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1CalendarEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_calendar_event_source_dict = io_argoproj_events_v1alpha1_calendar_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1CalendarEventSource from a dict +io_argoproj_events_v1alpha1_calendar_event_source_form_dict = io_argoproj_events_v1alpha1_calendar_event_source.from_dict(io_argoproj_events_v1alpha1_calendar_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md index aea20471dcea..6995d7ff0eb7 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **enabled** | **bool** | | [optional] **max_duration** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_catchup_configuration import IoArgoprojEventsV1alpha1CatchupConfiguration + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1CatchupConfiguration from a JSON string +io_argoproj_events_v1alpha1_catchup_configuration_instance = IoArgoprojEventsV1alpha1CatchupConfiguration.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1CatchupConfiguration.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_catchup_configuration_dict = io_argoproj_events_v1alpha1_catchup_configuration_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1CatchupConfiguration from a dict +io_argoproj_events_v1alpha1_catchup_configuration_form_dict = io_argoproj_events_v1alpha1_catchup_configuration.from_dict(io_argoproj_events_v1alpha1_catchup_configuration_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Condition.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Condition.md index e99152449026..86d75e759ae3 100644 --- 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Condition.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Condition.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **last_transition_time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] @@ -9,8 +10,24 @@ Name | Type | Description | Notes **reason** | **str** | | [optional] **status** | **str** | | [optional] **type** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Condition from a JSON string +io_argoproj_events_v1alpha1_condition_instance = IoArgoprojEventsV1alpha1Condition.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Condition.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_condition_dict = io_argoproj_events_v1alpha1_condition_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Condition from a dict +io_argoproj_events_v1alpha1_condition_form_dict = io_argoproj_events_v1alpha1_condition.from_dict(io_argoproj_events_v1alpha1_condition_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md index fb0076af3e7f..e9a1d74a143c 100644 
--- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cron** | **str** | | [optional] **timezone** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ConditionsResetByTime from a JSON string +io_argoproj_events_v1alpha1_conditions_reset_by_time_instance = IoArgoprojEventsV1alpha1ConditionsResetByTime.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ConditionsResetByTime.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_conditions_reset_by_time_dict = io_argoproj_events_v1alpha1_conditions_reset_by_time_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ConditionsResetByTime from a dict +io_argoproj_events_v1alpha1_conditions_reset_by_time_form_dict = io_argoproj_events_v1alpha1_conditions_reset_by_time.from_dict(io_argoproj_events_v1alpha1_conditions_reset_by_time_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md index 1e721e1eb847..3d6fd784cbcf 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md +++ 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **by_time** | [**IoArgoprojEventsV1alpha1ConditionsResetByTime**](IoArgoprojEventsV1alpha1ConditionsResetByTime.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ConditionsResetCriteria from a JSON string +io_argoproj_events_v1alpha1_conditions_reset_criteria_instance = IoArgoprojEventsV1alpha1ConditionsResetCriteria.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ConditionsResetCriteria.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_conditions_reset_criteria_dict = io_argoproj_events_v1alpha1_conditions_reset_criteria_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ConditionsResetCriteria from a dict +io_argoproj_events_v1alpha1_conditions_reset_criteria_form_dict = io_argoproj_events_v1alpha1_conditions_reset_criteria.from_dict(io_argoproj_events_v1alpha1_conditions_reset_criteria_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md index 3b1b1d92cfc3..1146a45db3b8 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md +++ 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_if_not_exist** | **bool** | | [optional] **name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ConfigMapPersistence from a JSON string +io_argoproj_events_v1alpha1_config_map_persistence_instance = IoArgoprojEventsV1alpha1ConfigMapPersistence.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ConfigMapPersistence.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_config_map_persistence_dict = io_argoproj_events_v1alpha1_config_map_persistence_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ConfigMapPersistence from a dict +io_argoproj_events_v1alpha1_config_map_persistence_form_dict = io_argoproj_events_v1alpha1_config_map_persistence.from_dict(io_argoproj_events_v1alpha1_config_map_persistence_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md index b8338d9fc504..52966472d781 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md @@ -3,17 +3,34 @@ CustomTrigger refers to the specification of the custom 
trigger. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved custom trigger trigger object. | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved custom trigger trigger object. | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **secure** | **bool** | | [optional] **server_name_override** | **str** | ServerNameOverride for the secure connection between sensor and custom trigger gRPC server. | [optional] **server_url** | **str** | | [optional] -**spec** | **{str: (str,)}** | Spec is the custom trigger resource specification that custom trigger gRPC server knows how to interpret. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**spec** | **Dict[str, str]** | Spec is the custom trigger resource specification that custom trigger gRPC server knows how to interpret. 
| [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1CustomTrigger from a JSON string +io_argoproj_events_v1alpha1_custom_trigger_instance = IoArgoprojEventsV1alpha1CustomTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1CustomTrigger.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_custom_trigger_dict = io_argoproj_events_v1alpha1_custom_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1CustomTrigger from a dict +io_argoproj_events_v1alpha1_custom_trigger_form_dict = io_argoproj_events_v1alpha1_custom_trigger.from_dict(io_argoproj_events_v1alpha1_custom_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1DataFilter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1DataFilter.md index 44d16cb3af87..5ea043c6c663 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1DataFilter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1DataFilter.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **comparator** | **str** | Comparator compares the event data with a user given value. Can be \">=\", \">\", \"=\", \"!=\", \"<\", or \"<=\". Is optional, and if left blank treated as equality \"=\". | [optional] **path** | **str** | Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. 
The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this. | [optional] **template** | **str** | | [optional] **type** | **str** | | [optional] -**value** | **[str]** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**value** | **List[str]** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_data_filter import IoArgoprojEventsV1alpha1DataFilter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1DataFilter from a JSON string +io_argoproj_events_v1alpha1_data_filter_instance = IoArgoprojEventsV1alpha1DataFilter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1DataFilter.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_data_filter_dict = io_argoproj_events_v1alpha1_data_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1DataFilter from a dict +io_argoproj_events_v1alpha1_data_filter_form_dict = io_argoproj_events_v1alpha1_data_filter.from_dict(io_argoproj_events_v1alpha1_data_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md index 60193ce7ab1b..ac4b864f04e2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md @@ -3,19 +3,36 @@ EmailTrigger refers to the specification of the email notification trigger. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **body** | **str** | | [optional] -**_from** | **str** | | [optional] +**var_from** | **str** | | [optional] **host** | **str** | Host refers to the smtp host url to which email is send. | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] **port** | **int** | | [optional] **smtp_password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **subject** | **str** | | [optional] -**to** | **[str]** | | [optional] +**to** | **List[str]** | | [optional] **username** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EmailTrigger from a JSON string +io_argoproj_events_v1alpha1_email_trigger_instance = IoArgoprojEventsV1alpha1EmailTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EmailTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_email_trigger_dict = io_argoproj_events_v1alpha1_email_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EmailTrigger from a dict +io_argoproj_events_v1alpha1_email_trigger_form_dict = io_argoproj_events_v1alpha1_email_trigger.from_dict(io_argoproj_events_v1alpha1_email_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md index 550827d6bebb..c10343790022 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **broker** | **str** | Broker URI to connect to. | [optional] @@ -10,12 +11,28 @@ Name | Type | Description | Notes **connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **username** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_emitter_event_source import IoArgoprojEventsV1alpha1EmitterEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EmitterEventSource from a JSON string +io_argoproj_events_v1alpha1_emitter_event_source_instance = IoArgoprojEventsV1alpha1EmitterEventSource.from_json(json) +# print the JSON string representation of the object 
+print(IoArgoprojEventsV1alpha1EmitterEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_emitter_event_source_dict = io_argoproj_events_v1alpha1_emitter_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EmitterEventSource from a dict +io_argoproj_events_v1alpha1_emitter_event_source_form_dict = io_argoproj_events_v1alpha1_emitter_event_source.from_dict(io_argoproj_events_v1alpha1_emitter_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventContext.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventContext.md index c2608917fa6e..0237417dfaf4 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventContext.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventContext.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **datacontenttype** | **str** | DataContentType - A MIME (RFC2046) string describing the media type of `data`. | [optional] @@ -11,8 +12,24 @@ Name | Type | Description | Notes **subject** | **str** | | [optional] **time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **type** | **str** | Type - The type of the occurrence which has happened. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_context import IoArgoprojEventsV1alpha1EventContext + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventContext from a JSON string +io_argoproj_events_v1alpha1_event_context_instance = IoArgoprojEventsV1alpha1EventContext.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventContext.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_context_dict = io_argoproj_events_v1alpha1_event_context_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventContext from a dict +io_argoproj_events_v1alpha1_event_context_form_dict = io_argoproj_events_v1alpha1_event_context.from_dict(io_argoproj_events_v1alpha1_event_context_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependency.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependency.md index 57f84d47b760..da8b19b8d4e2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependency.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependency.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_name** | **str** | | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **filters_logical_operator** | **str** | FiltersLogicalOperator defines how different filters are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). 
| [optional] **name** | **str** | | [optional] **transform** | [**IoArgoprojEventsV1alpha1EventDependencyTransformer**](IoArgoprojEventsV1alpha1EventDependencyTransformer.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventDependency from a JSON string +io_argoproj_events_v1alpha1_event_dependency_instance = IoArgoprojEventsV1alpha1EventDependency.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventDependency.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_dependency_dict = io_argoproj_events_v1alpha1_event_dependency_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventDependency from a dict +io_argoproj_events_v1alpha1_event_dependency_form_dict = io_argoproj_events_v1alpha1_event_dependency.from_dict(io_argoproj_events_v1alpha1_event_dependency_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md index cde7363bffdc..15fa0db5cdd9 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md @@ -3,17 +3,34 @@ EventDependencyFilter defines filters and constraints for a io.argoproj.workflow.v1alpha1. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **context** | [**IoArgoprojEventsV1alpha1EventContext**](IoArgoprojEventsV1alpha1EventContext.md) | | [optional] -**data** | [**[IoArgoprojEventsV1alpha1DataFilter]**](IoArgoprojEventsV1alpha1DataFilter.md) | | [optional] +**data** | [**List[IoArgoprojEventsV1alpha1DataFilter]**](IoArgoprojEventsV1alpha1DataFilter.md) | | [optional] **data_logical_operator** | **str** | DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). | [optional] **expr_logical_operator** | **str** | ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). | [optional] -**exprs** | [**[IoArgoprojEventsV1alpha1ExprFilter]**](IoArgoprojEventsV1alpha1ExprFilter.md) | Exprs contains the list of expressions evaluated against the event payload. | [optional] +**exprs** | [**List[IoArgoprojEventsV1alpha1ExprFilter]**](IoArgoprojEventsV1alpha1ExprFilter.md) | Exprs contains the list of expressions evaluated against the event payload. | [optional] **script** | **str** | Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1. 
| [optional] **time** | [**IoArgoprojEventsV1alpha1TimeFilter**](IoArgoprojEventsV1alpha1TimeFilter.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_filter import IoArgoprojEventsV1alpha1EventDependencyFilter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventDependencyFilter from a JSON string +io_argoproj_events_v1alpha1_event_dependency_filter_instance = IoArgoprojEventsV1alpha1EventDependencyFilter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventDependencyFilter.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_dependency_filter_dict = io_argoproj_events_v1alpha1_event_dependency_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventDependencyFilter from a dict +io_argoproj_events_v1alpha1_event_dependency_filter_form_dict = io_argoproj_events_v1alpha1_event_dependency_filter.from_dict(io_argoproj_events_v1alpha1_event_dependency_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md index deeaf11d5e1b..af5caf9048a2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **jq** | **str** | | [optional] **script** | **str** | | 
[optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventDependencyTransformer from a JSON string +io_argoproj_events_v1alpha1_event_dependency_transformer_instance = IoArgoprojEventsV1alpha1EventDependencyTransformer.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventDependencyTransformer.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_dependency_transformer_dict = io_argoproj_events_v1alpha1_event_dependency_transformer_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventDependencyTransformer from a dict +io_argoproj_events_v1alpha1_event_dependency_transformer_form_dict = io_argoproj_events_v1alpha1_event_dependency_transformer.from_dict(io_argoproj_events_v1alpha1_event_dependency_transformer_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md index a7d85878c6f0..9393549043d8 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **catchup** | [**IoArgoprojEventsV1alpha1CatchupConfiguration**](IoArgoprojEventsV1alpha1CatchupConfiguration.md) | | [optional] **config_map** 
| [**IoArgoprojEventsV1alpha1ConfigMapPersistence**](IoArgoprojEventsV1alpha1ConfigMapPersistence.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventPersistence from a JSON string +io_argoproj_events_v1alpha1_event_persistence_instance = IoArgoprojEventsV1alpha1EventPersistence.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventPersistence.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_persistence_dict = io_argoproj_events_v1alpha1_event_persistence_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventPersistence from a dict +io_argoproj_events_v1alpha1_event_persistence_form_dict = io_argoproj_events_v1alpha1_event_persistence.from_dict(io_argoproj_events_v1alpha1_event_persistence_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSource.md index d7ed376ebf36..7a2e349bb4e7 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSource.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] **spec** | [**IoArgoprojEventsV1alpha1EventSourceSpec**](IoArgoprojEventsV1alpha1EventSourceSpec.md) | | [optional] 
**status** | [**IoArgoprojEventsV1alpha1EventSourceStatus**](IoArgoprojEventsV1alpha1EventSourceStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventSource from a JSON string +io_argoproj_events_v1alpha1_event_source_instance = IoArgoprojEventsV1alpha1EventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_source_dict = io_argoproj_events_v1alpha1_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventSource from a dict +io_argoproj_events_v1alpha1_event_source_form_dict = io_argoproj_events_v1alpha1_event_source.from_dict(io_argoproj_events_v1alpha1_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md index 9465351684e5..02658a880812 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **expression** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## 
Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventSourceFilter from a JSON string +io_argoproj_events_v1alpha1_event_source_filter_instance = IoArgoprojEventsV1alpha1EventSourceFilter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventSourceFilter.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_source_filter_dict = io_argoproj_events_v1alpha1_event_source_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventSourceFilter from a dict +io_argoproj_events_v1alpha1_event_source_filter_form_dict = io_argoproj_events_v1alpha1_event_source_filter.from_dict(io_argoproj_events_v1alpha1_event_source_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md index 87fffdf0a007..b9850513b2f0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojEventsV1alpha1EventSource]**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**items** | [**List[IoArgoprojEventsV1alpha1EventSource]**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] **metadata** | [**ListMeta**](ListMeta.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct 
type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventSourceList from a JSON string +io_argoproj_events_v1alpha1_event_source_list_instance = IoArgoprojEventsV1alpha1EventSourceList.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventSourceList.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_source_list_dict = io_argoproj_events_v1alpha1_event_source_list_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventSourceList from a dict +io_argoproj_events_v1alpha1_event_source_list_form_dict = io_argoproj_events_v1alpha1_event_source_list.from_dict(io_argoproj_events_v1alpha1_event_source_list_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md index 060cb041f0a1..03a2caa9000d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md @@ -2,45 +2,62 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**amqp** | [**{str: (IoArgoprojEventsV1alpha1AMQPEventSource,)}**](IoArgoprojEventsV1alpha1AMQPEventSource.md) | | [optional] -**azure_events_hub** | [**{str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)}**](IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md) | | [optional] -**azure_queue_storage** | [**{str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)}**](IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md) | | 
[optional] -**azure_service_bus** | [**{str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)}**](IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md) | | [optional] -**bitbucket** | [**{str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)}**](IoArgoprojEventsV1alpha1BitbucketEventSource.md) | | [optional] -**bitbucketserver** | [**{str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)}**](IoArgoprojEventsV1alpha1BitbucketServerEventSource.md) | | [optional] -**calendar** | [**{str: (IoArgoprojEventsV1alpha1CalendarEventSource,)}**](IoArgoprojEventsV1alpha1CalendarEventSource.md) | | [optional] -**emitter** | [**{str: (IoArgoprojEventsV1alpha1EmitterEventSource,)}**](IoArgoprojEventsV1alpha1EmitterEventSource.md) | | [optional] +**amqp** | [**Dict[str, IoArgoprojEventsV1alpha1AMQPEventSource]**](IoArgoprojEventsV1alpha1AMQPEventSource.md) | | [optional] +**azure_events_hub** | [**Dict[str, IoArgoprojEventsV1alpha1AzureEventsHubEventSource]**](IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md) | | [optional] +**azure_queue_storage** | [**Dict[str, IoArgoprojEventsV1alpha1AzureQueueStorageEventSource]**](IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md) | | [optional] +**azure_service_bus** | [**Dict[str, IoArgoprojEventsV1alpha1AzureServiceBusEventSource]**](IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md) | | [optional] +**bitbucket** | [**Dict[str, IoArgoprojEventsV1alpha1BitbucketEventSource]**](IoArgoprojEventsV1alpha1BitbucketEventSource.md) | | [optional] +**bitbucketserver** | [**Dict[str, IoArgoprojEventsV1alpha1BitbucketServerEventSource]**](IoArgoprojEventsV1alpha1BitbucketServerEventSource.md) | | [optional] +**calendar** | [**Dict[str, IoArgoprojEventsV1alpha1CalendarEventSource]**](IoArgoprojEventsV1alpha1CalendarEventSource.md) | | [optional] +**emitter** | [**Dict[str, IoArgoprojEventsV1alpha1EmitterEventSource]**](IoArgoprojEventsV1alpha1EmitterEventSource.md) | | [optional] **event_bus_name** | **str** | | [optional] 
-**file** | [**{str: (IoArgoprojEventsV1alpha1FileEventSource,)}**](IoArgoprojEventsV1alpha1FileEventSource.md) | | [optional] -**generic** | [**{str: (IoArgoprojEventsV1alpha1GenericEventSource,)}**](IoArgoprojEventsV1alpha1GenericEventSource.md) | | [optional] -**gerrit** | [**{str: (IoArgoprojEventsV1alpha1GerritEventSource,)}**](IoArgoprojEventsV1alpha1GerritEventSource.md) | | [optional] -**github** | [**{str: (IoArgoprojEventsV1alpha1GithubEventSource,)}**](IoArgoprojEventsV1alpha1GithubEventSource.md) | | [optional] -**gitlab** | [**{str: (IoArgoprojEventsV1alpha1GitlabEventSource,)}**](IoArgoprojEventsV1alpha1GitlabEventSource.md) | | [optional] -**hdfs** | [**{str: (IoArgoprojEventsV1alpha1HDFSEventSource,)}**](IoArgoprojEventsV1alpha1HDFSEventSource.md) | | [optional] -**kafka** | [**{str: (IoArgoprojEventsV1alpha1KafkaEventSource,)}**](IoArgoprojEventsV1alpha1KafkaEventSource.md) | | [optional] -**minio** | [**{str: (IoArgoprojEventsV1alpha1S3Artifact,)}**](IoArgoprojEventsV1alpha1S3Artifact.md) | | [optional] -**mqtt** | [**{str: (IoArgoprojEventsV1alpha1MQTTEventSource,)}**](IoArgoprojEventsV1alpha1MQTTEventSource.md) | | [optional] -**nats** | [**{str: (IoArgoprojEventsV1alpha1NATSEventsSource,)}**](IoArgoprojEventsV1alpha1NATSEventsSource.md) | | [optional] -**nsq** | [**{str: (IoArgoprojEventsV1alpha1NSQEventSource,)}**](IoArgoprojEventsV1alpha1NSQEventSource.md) | | [optional] -**pub_sub** | [**{str: (IoArgoprojEventsV1alpha1PubSubEventSource,)}**](IoArgoprojEventsV1alpha1PubSubEventSource.md) | | [optional] -**pulsar** | [**{str: (IoArgoprojEventsV1alpha1PulsarEventSource,)}**](IoArgoprojEventsV1alpha1PulsarEventSource.md) | | [optional] -**redis** | [**{str: (IoArgoprojEventsV1alpha1RedisEventSource,)}**](IoArgoprojEventsV1alpha1RedisEventSource.md) | | [optional] -**redis_stream** | [**{str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)}**](IoArgoprojEventsV1alpha1RedisStreamEventSource.md) | | [optional] +**file** | [**Dict[str, 
IoArgoprojEventsV1alpha1FileEventSource]**](IoArgoprojEventsV1alpha1FileEventSource.md) | | [optional] +**generic** | [**Dict[str, IoArgoprojEventsV1alpha1GenericEventSource]**](IoArgoprojEventsV1alpha1GenericEventSource.md) | | [optional] +**gerrit** | [**Dict[str, IoArgoprojEventsV1alpha1GerritEventSource]**](IoArgoprojEventsV1alpha1GerritEventSource.md) | | [optional] +**github** | [**Dict[str, IoArgoprojEventsV1alpha1GithubEventSource]**](IoArgoprojEventsV1alpha1GithubEventSource.md) | | [optional] +**gitlab** | [**Dict[str, IoArgoprojEventsV1alpha1GitlabEventSource]**](IoArgoprojEventsV1alpha1GitlabEventSource.md) | | [optional] +**hdfs** | [**Dict[str, IoArgoprojEventsV1alpha1HDFSEventSource]**](IoArgoprojEventsV1alpha1HDFSEventSource.md) | | [optional] +**kafka** | [**Dict[str, IoArgoprojEventsV1alpha1KafkaEventSource]**](IoArgoprojEventsV1alpha1KafkaEventSource.md) | | [optional] +**minio** | [**Dict[str, IoArgoprojEventsV1alpha1S3Artifact]**](IoArgoprojEventsV1alpha1S3Artifact.md) | | [optional] +**mqtt** | [**Dict[str, IoArgoprojEventsV1alpha1MQTTEventSource]**](IoArgoprojEventsV1alpha1MQTTEventSource.md) | | [optional] +**nats** | [**Dict[str, IoArgoprojEventsV1alpha1NATSEventsSource]**](IoArgoprojEventsV1alpha1NATSEventsSource.md) | | [optional] +**nsq** | [**Dict[str, IoArgoprojEventsV1alpha1NSQEventSource]**](IoArgoprojEventsV1alpha1NSQEventSource.md) | | [optional] +**pub_sub** | [**Dict[str, IoArgoprojEventsV1alpha1PubSubEventSource]**](IoArgoprojEventsV1alpha1PubSubEventSource.md) | | [optional] +**pulsar** | [**Dict[str, IoArgoprojEventsV1alpha1PulsarEventSource]**](IoArgoprojEventsV1alpha1PulsarEventSource.md) | | [optional] +**redis** | [**Dict[str, IoArgoprojEventsV1alpha1RedisEventSource]**](IoArgoprojEventsV1alpha1RedisEventSource.md) | | [optional] +**redis_stream** | [**Dict[str, IoArgoprojEventsV1alpha1RedisStreamEventSource]**](IoArgoprojEventsV1alpha1RedisStreamEventSource.md) | | [optional] **replicas** | **int** | | [optional] 
-**resource** | [**{str: (IoArgoprojEventsV1alpha1ResourceEventSource,)}**](IoArgoprojEventsV1alpha1ResourceEventSource.md) | | [optional] +**resource** | [**Dict[str, IoArgoprojEventsV1alpha1ResourceEventSource]**](IoArgoprojEventsV1alpha1ResourceEventSource.md) | | [optional] **service** | [**IoArgoprojEventsV1alpha1Service**](IoArgoprojEventsV1alpha1Service.md) | | [optional] -**sftp** | [**{str: (IoArgoprojEventsV1alpha1SFTPEventSource,)}**](IoArgoprojEventsV1alpha1SFTPEventSource.md) | | [optional] -**slack** | [**{str: (IoArgoprojEventsV1alpha1SlackEventSource,)}**](IoArgoprojEventsV1alpha1SlackEventSource.md) | | [optional] -**sns** | [**{str: (IoArgoprojEventsV1alpha1SNSEventSource,)}**](IoArgoprojEventsV1alpha1SNSEventSource.md) | | [optional] -**sqs** | [**{str: (IoArgoprojEventsV1alpha1SQSEventSource,)}**](IoArgoprojEventsV1alpha1SQSEventSource.md) | | [optional] -**storage_grid** | [**{str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)}**](IoArgoprojEventsV1alpha1StorageGridEventSource.md) | | [optional] -**stripe** | [**{str: (IoArgoprojEventsV1alpha1StripeEventSource,)}**](IoArgoprojEventsV1alpha1StripeEventSource.md) | | [optional] +**sftp** | [**Dict[str, IoArgoprojEventsV1alpha1SFTPEventSource]**](IoArgoprojEventsV1alpha1SFTPEventSource.md) | | [optional] +**slack** | [**Dict[str, IoArgoprojEventsV1alpha1SlackEventSource]**](IoArgoprojEventsV1alpha1SlackEventSource.md) | | [optional] +**sns** | [**Dict[str, IoArgoprojEventsV1alpha1SNSEventSource]**](IoArgoprojEventsV1alpha1SNSEventSource.md) | | [optional] +**sqs** | [**Dict[str, IoArgoprojEventsV1alpha1SQSEventSource]**](IoArgoprojEventsV1alpha1SQSEventSource.md) | | [optional] +**storage_grid** | [**Dict[str, IoArgoprojEventsV1alpha1StorageGridEventSource]**](IoArgoprojEventsV1alpha1StorageGridEventSource.md) | | [optional] +**stripe** | [**Dict[str, IoArgoprojEventsV1alpha1StripeEventSource]**](IoArgoprojEventsV1alpha1StripeEventSource.md) | | [optional] **template** | 
[**IoArgoprojEventsV1alpha1Template**](IoArgoprojEventsV1alpha1Template.md) | | [optional] -**webhook** | [**{str: (IoArgoprojEventsV1alpha1WebhookEventSource,)}**](IoArgoprojEventsV1alpha1WebhookEventSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**webhook** | [**Dict[str, IoArgoprojEventsV1alpha1WebhookEventSource]**](IoArgoprojEventsV1alpha1WebhookEventSource.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventSourceSpec from a JSON string +io_argoproj_events_v1alpha1_event_source_spec_instance = IoArgoprojEventsV1alpha1EventSourceSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventSourceSpec.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_event_source_spec_dict = io_argoproj_events_v1alpha1_event_source_spec_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventSourceSpec from a dict +io_argoproj_events_v1alpha1_event_source_spec_form_dict = io_argoproj_events_v1alpha1_event_source_spec.from_dict(io_argoproj_events_v1alpha1_event_source_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md index cc92d67a515a..874eea6c8f07 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | 
Description | Notes ------------ | ------------- | ------------- | ------------- **status** | [**IoArgoprojEventsV1alpha1Status**](IoArgoprojEventsV1alpha1Status.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1EventSourceStatus from a JSON string +io_argoproj_events_v1alpha1_event_source_status_instance = IoArgoprojEventsV1alpha1EventSourceStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1EventSourceStatus.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_event_source_status_dict = io_argoproj_events_v1alpha1_event_source_status_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1EventSourceStatus from a dict +io_argoproj_events_v1alpha1_event_source_status_form_dict = io_argoproj_events_v1alpha1_event_source_status.from_dict(io_argoproj_events_v1alpha1_event_source_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md index be5739ec9754..3c23e2c0d1cb 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **expr** | **str** | Expr refers to the expression that determines the outcome of the filter. 
| [optional] -**fields** | [**[IoArgoprojEventsV1alpha1PayloadField]**](IoArgoprojEventsV1alpha1PayloadField.md) | Fields refers to set of keys that refer to the paths within event payload. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**fields** | [**List[IoArgoprojEventsV1alpha1PayloadField]**](IoArgoprojEventsV1alpha1PayloadField.md) | Fields refers to set of keys that refer to the paths within event payload. | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_expr_filter import IoArgoprojEventsV1alpha1ExprFilter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ExprFilter from a JSON string +io_argoproj_events_v1alpha1_expr_filter_instance = IoArgoprojEventsV1alpha1ExprFilter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ExprFilter.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_expr_filter_dict = io_argoproj_events_v1alpha1_expr_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ExprFilter from a dict +io_argoproj_events_v1alpha1_expr_filter_form_dict = io_argoproj_events_v1alpha1_expr_filter.from_dict(io_argoproj_events_v1alpha1_expr_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md index 1378d2e445a0..e6d72e39d82a 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | 
------------- | ------------- **path** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_file_artifact import IoArgoprojEventsV1alpha1FileArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1FileArtifact from a JSON string +io_argoproj_events_v1alpha1_file_artifact_instance = IoArgoprojEventsV1alpha1FileArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1FileArtifact.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_file_artifact_dict = io_argoproj_events_v1alpha1_file_artifact_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1FileArtifact from a dict +io_argoproj_events_v1alpha1_file_artifact_form_dict = io_argoproj_events_v1alpha1_file_artifact.from_dict(io_argoproj_events_v1alpha1_file_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md index 4faab6841ac2..b6147f77134a 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md @@ -3,15 +3,32 @@ FileEventSource describes an event-source for file related events. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_type** | **str** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **polling** | **bool** | | [optional] **watch_path_config** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1FileEventSource from a JSON string +io_argoproj_events_v1alpha1_file_event_source_instance = IoArgoprojEventsV1alpha1FileEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1FileEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_file_event_source_dict = io_argoproj_events_v1alpha1_file_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1FileEventSource from a dict +io_argoproj_events_v1alpha1_file_event_source_form_dict = io_argoproj_events_v1alpha1_file_event_source.from_dict(io_argoproj_events_v1alpha1_file_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md index ece5e692918b..8ca6d8f3c4c1 100644 --- 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md @@ -3,6 +3,7 @@ GenericEventSource refers to a generic event source. It can be used to implement a custom event source. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auth_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -10,10 +11,26 @@ Name | Type | Description | Notes **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **insecure** | **bool** | Insecure determines the type of connection. | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **url** | **str** | URL of the gRPC server that implements the event source. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GenericEventSource from a JSON string +io_argoproj_events_v1alpha1_generic_event_source_instance = IoArgoprojEventsV1alpha1GenericEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GenericEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_generic_event_source_dict = io_argoproj_events_v1alpha1_generic_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GenericEventSource from a dict +io_argoproj_events_v1alpha1_generic_event_source_form_dict = 
io_argoproj_events_v1alpha1_generic_event_source.from_dict(io_argoproj_events_v1alpha1_generic_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md index eebdd45b351f..e8c2db9d7431 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md @@ -2,20 +2,37 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] **delete_hook_on_finish** | **bool** | | [optional] -**events** | **[str]** | | [optional] +**events** | **List[str]** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **gerrit_base_url** | **str** | | [optional] **hook_name** | **str** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] -**projects** | **[str]** | List of project namespace paths like \"whynowy/test\". | [optional] +**metadata** | **Dict[str, str]** | | [optional] +**projects** | **List[str]** | List of project namespace paths like \"whynowy/test\". 
| [optional] **ssl_verify** | **bool** | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GerritEventSource from a JSON string +io_argoproj_events_v1alpha1_gerrit_event_source_instance = IoArgoprojEventsV1alpha1GerritEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GerritEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_gerrit_event_source_dict = io_argoproj_events_v1alpha1_gerrit_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GerritEventSource from a dict +io_argoproj_events_v1alpha1_gerrit_event_source_form_dict = io_argoproj_events_v1alpha1_gerrit_event_source.from_dict(io_argoproj_events_v1alpha1_gerrit_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md index bde5a82c5c59..d6673c21bc9c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **branch** | **str** | | [optional] @@ -14,8 +15,24 @@ Name | Type | Description | Notes **ssh_key_secret** | 
[**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tag** | **str** | | [optional] **url** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_git_artifact import IoArgoprojEventsV1alpha1GitArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GitArtifact from a JSON string +io_argoproj_events_v1alpha1_git_artifact_instance = IoArgoprojEventsV1alpha1GitArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GitArtifact.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_git_artifact_dict = io_argoproj_events_v1alpha1_git_artifact_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GitArtifact from a dict +io_argoproj_events_v1alpha1_git_artifact_form_dict = io_argoproj_events_v1alpha1_git_artifact.from_dict(io_argoproj_events_v1alpha1_git_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitCreds.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitCreds.md index 01cccc15d5b6..409cc5117097 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitCreds.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitCreds.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any 
string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GitCreds from a JSON string +io_argoproj_events_v1alpha1_git_creds_instance = IoArgoprojEventsV1alpha1GitCreds.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GitCreds.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_git_creds_dict = io_argoproj_events_v1alpha1_git_creds_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GitCreds from a dict +io_argoproj_events_v1alpha1_git_creds_form_dict = io_argoproj_events_v1alpha1_git_creds.from_dict(io_argoproj_events_v1alpha1_git_creds_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md index 1ceb72c6fc51..07b1695ec802 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of the remote to fetch from. | [optional] -**urls** | **[str]** | URLs the URLs of a remote repository. It must be non-empty. Fetch will always use the first URL, while push will use all of them. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**urls** | **List[str]** | URLs the URLs of a remote repository. 
It must be non-empty. Fetch will always use the first URL, while push will use all of them. | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GitRemoteConfig from a JSON string +io_argoproj_events_v1alpha1_git_remote_config_instance = IoArgoprojEventsV1alpha1GitRemoteConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GitRemoteConfig.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_git_remote_config_dict = io_argoproj_events_v1alpha1_git_remote_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GitRemoteConfig from a dict +io_argoproj_events_v1alpha1_git_remote_config_form_dict = io_argoproj_events_v1alpha1_git_remote_config.from_dict(io_argoproj_events_v1alpha1_git_remote_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md index dee75a2bf9e8..92dcf201e365 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **app_id** | **str** | | [optional] **installation_id** | **str** | | [optional] **private_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from 
argo_workflows.models.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GithubAppCreds from a JSON string +io_argoproj_events_v1alpha1_github_app_creds_instance = IoArgoprojEventsV1alpha1GithubAppCreds.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GithubAppCreds.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_github_app_creds_dict = io_argoproj_events_v1alpha1_github_app_creds_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GithubAppCreds from a dict +io_argoproj_events_v1alpha1_github_app_creds_form_dict = io_argoproj_events_v1alpha1_github_app_creds.from_dict(io_argoproj_events_v1alpha1_github_app_creds_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md index aae17bff0524..b5f8ee3500ba 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md @@ -2,28 +2,45 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **active** | **bool** | | [optional] **api_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **content_type** | **str** | | [optional] **delete_hook_on_finish** | **bool** | | [optional] -**events** | **[str]** | | [optional] +**events** | **List[str]** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **github_app** | 
[**IoArgoprojEventsV1alpha1GithubAppCreds**](IoArgoprojEventsV1alpha1GithubAppCreds.md) | | [optional] **github_base_url** | **str** | | [optional] **github_upload_url** | **str** | | [optional] **id** | **str** | | [optional] **insecure** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] -**organizations** | **[str]** | Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set. | [optional] +**metadata** | **Dict[str, str]** | | [optional] +**organizations** | **List[str]** | Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set. | [optional] **owner** | **str** | | [optional] -**repositories** | [**[IoArgoprojEventsV1alpha1OwnedRepositories]**](IoArgoprojEventsV1alpha1OwnedRepositories.md) | Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set. | [optional] +**repositories** | [**List[IoArgoprojEventsV1alpha1OwnedRepositories]**](IoArgoprojEventsV1alpha1OwnedRepositories.md) | Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set. 
| [optional] **repository** | **str** | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] **webhook_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GithubEventSource from a JSON string +io_argoproj_events_v1alpha1_github_event_source_instance = IoArgoprojEventsV1alpha1GithubEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GithubEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_github_event_source_dict = io_argoproj_events_v1alpha1_github_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GithubEventSource from a dict +io_argoproj_events_v1alpha1_github_event_source_form_dict = io_argoproj_events_v1alpha1_github_event_source.from_dict(io_argoproj_events_v1alpha1_github_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md index d8e7a9627ea0..09c6a4d0fb37 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md @@ -2,22 +2,39 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- 
**access_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **delete_hook_on_finish** | **bool** | | [optional] **enable_ssl_verification** | **bool** | | [optional] -**events** | **[str]** | Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794. | [optional] +**events** | **List[str]** | Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794. | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **gitlab_base_url** | **str** | | [optional] -**groups** | **[str]** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**groups** | **List[str]** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **project_id** | **str** | | [optional] -**projects** | **[str]** | | [optional] +**projects** | **List[str]** | | [optional] **secret_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1GitlabEventSource from a JSON string +io_argoproj_events_v1alpha1_gitlab_event_source_instance = IoArgoprojEventsV1alpha1GitlabEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1GitlabEventSource.to_json()) + +# convert the object into a dict 
+io_argoproj_events_v1alpha1_gitlab_event_source_dict = io_argoproj_events_v1alpha1_gitlab_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1GitlabEventSource from a dict +io_argoproj_events_v1alpha1_gitlab_event_source_form_dict = io_argoproj_events_v1alpha1_gitlab_event_source.from_dict(io_argoproj_events_v1alpha1_gitlab_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md index 9173487e0cbf..d1cdab546122 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md @@ -2,9 +2,10 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**addresses** | **[str]** | | [optional] +**addresses** | **List[str]** | | [optional] **check_interval** | **str** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **hdfs_user** | **str** | HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used. | [optional] @@ -14,11 +15,27 @@ Name | Type | Description | Notes **krb_realm** | **str** | KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used. | [optional] **krb_service_principal_name** | **str** | KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used. | [optional] **krb_username** | **str** | KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used. 
| [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **type** | **str** | | [optional] **watch_path_config** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1HDFSEventSource from a JSON string +io_argoproj_events_v1alpha1_hdfs_event_source_instance = IoArgoprojEventsV1alpha1HDFSEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1HDFSEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_hdfs_event_source_dict = io_argoproj_events_v1alpha1_hdfs_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1HDFSEventSource from a dict +io_argoproj_events_v1alpha1_hdfs_event_source_form_dict = io_argoproj_events_v1alpha1_hdfs_event_source.from_dict(io_argoproj_events_v1alpha1_hdfs_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md index 258bd5459386..1031547cfb69 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md @@ -2,19 +2,36 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **basic_auth** | 
[**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**headers** | **{str: (str,)}** | | [optional] +**headers** | **Dict[str, str]** | | [optional] **method** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource. | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**secure_headers** | [**[IoArgoprojEventsV1alpha1SecureHeader]**](IoArgoprojEventsV1alpha1SecureHeader.md) | | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource. | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**secure_headers** | [**List[IoArgoprojEventsV1alpha1SecureHeader]**](IoArgoprojEventsV1alpha1SecureHeader.md) | | [optional] **timeout** | **str** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **url** | **str** | URL refers to the URL to send HTTP request to. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_http_trigger import IoArgoprojEventsV1alpha1HTTPTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1HTTPTrigger from a JSON string +io_argoproj_events_v1alpha1_http_trigger_instance = IoArgoprojEventsV1alpha1HTTPTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1HTTPTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_http_trigger_dict = io_argoproj_events_v1alpha1_http_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1HTTPTrigger from a dict +io_argoproj_events_v1alpha1_http_trigger_form_dict = io_argoproj_events_v1alpha1_http_trigger.from_dict(io_argoproj_events_v1alpha1_http_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md index f0a3aa991d09..8fa90bc563b6 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **int64_val** | **str** | | [optional] **str_val** | **str** | | [optional] **type** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from 
argo_workflows.models.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Int64OrString from a JSON string +io_argoproj_events_v1alpha1_int64_or_string_instance = IoArgoprojEventsV1alpha1Int64OrString.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Int64OrString.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_int64_or_string_dict = io_argoproj_events_v1alpha1_int64_or_string_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Int64OrString from a dict +io_argoproj_events_v1alpha1_int64_or_string_form_dict = io_argoproj_events_v1alpha1_int64_or_string.from_dict(io_argoproj_events_v1alpha1_int64_or_string_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md index 4f5244055474..6c5f3f0b6863 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] **error_on_backoff_timeout** | **bool** | | [optional] -**labels** | **{str: (str,)}** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**labels** | **Dict[str, str]** | | [optional] + +## Example + +```python +from 
argo_workflows.models.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1K8SResourcePolicy from a JSON string +io_argoproj_events_v1alpha1_k8_s_resource_policy_instance = IoArgoprojEventsV1alpha1K8SResourcePolicy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1K8SResourcePolicy.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_k8_s_resource_policy_dict = io_argoproj_events_v1alpha1_k8_s_resource_policy_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1K8SResourcePolicy from a dict +io_argoproj_events_v1alpha1_k8_s_resource_policy_form_dict = io_argoproj_events_v1alpha1_k8_s_resource_policy.from_dict(io_argoproj_events_v1alpha1_k8_s_resource_policy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md index 37bd57fc60ee..b6d8872a394e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **group_name** | **str** | | [optional] **oldest** | **bool** | | [optional] **rebalance_strategy** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup + +# 
TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1KafkaConsumerGroup from a JSON string +io_argoproj_events_v1alpha1_kafka_consumer_group_instance = IoArgoprojEventsV1alpha1KafkaConsumerGroup.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1KafkaConsumerGroup.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_kafka_consumer_group_dict = io_argoproj_events_v1alpha1_kafka_consumer_group_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1KafkaConsumerGroup from a dict +io_argoproj_events_v1alpha1_kafka_consumer_group_form_dict = io_argoproj_events_v1alpha1_kafka_consumer_group.from_dict(io_argoproj_events_v1alpha1_kafka_consumer_group_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md index c405c1022800..61443ebf3718 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config** | **str** | Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. 
consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional | [optional] @@ -10,15 +11,31 @@ Name | Type | Description | Notes **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] **limit_events_per_second** | **str** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **partition** | **str** | | [optional] **sasl** | [**IoArgoprojEventsV1alpha1SASLConfig**](IoArgoprojEventsV1alpha1SASLConfig.md) | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **topic** | **str** | | [optional] **url** | **str** | | [optional] **version** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_event_source import IoArgoprojEventsV1alpha1KafkaEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1KafkaEventSource from a JSON string +io_argoproj_events_v1alpha1_kafka_event_source_instance = IoArgoprojEventsV1alpha1KafkaEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1KafkaEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_kafka_event_source_dict = io_argoproj_events_v1alpha1_kafka_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1KafkaEventSource from a dict +io_argoproj_events_v1alpha1_kafka_event_source_form_dict = io_argoproj_events_v1alpha1_kafka_event_source.from_dict(io_argoproj_events_v1alpha1_kafka_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md index 6fe39a9583b2..b751872ee2be 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md @@ -3,14 +3,15 @@ KafkaTrigger refers to the specification of the Kafka trigger. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **compress** | **bool** | | [optional] **flush_frequency** | **int** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] **partition** | **int** | | [optional] **partitioning_key** | **str** | The partitioning key for the messages put on the Kafka topic. +optional. | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **required_acks** | **int** | RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional. 
| [optional] **sasl** | [**IoArgoprojEventsV1alpha1SASLConfig**](IoArgoprojEventsV1alpha1SASLConfig.md) | | [optional] **schema_registry** | [**IoArgoprojEventsV1alpha1SchemaRegistryConfig**](IoArgoprojEventsV1alpha1SchemaRegistryConfig.md) | | [optional] @@ -18,8 +19,24 @@ Name | Type | Description | Notes **topic** | **str** | | [optional] **url** | **str** | URL of the Kafka broker, multiple URLs separated by comma. | [optional] **version** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1KafkaTrigger from a JSON string +io_argoproj_events_v1alpha1_kafka_trigger_instance = IoArgoprojEventsV1alpha1KafkaTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1KafkaTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_kafka_trigger_dict = io_argoproj_events_v1alpha1_kafka_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1KafkaTrigger from a dict +io_argoproj_events_v1alpha1_kafka_trigger_form_dict = io_argoproj_events_v1alpha1_kafka_trigger.from_dict(io_argoproj_events_v1alpha1_kafka_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md index a8de5480dce0..be5f32b50a0b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md @@ -2,11 
+2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **interval_seconds** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1LogTrigger from a JSON string +io_argoproj_events_v1alpha1_log_trigger_instance = IoArgoprojEventsV1alpha1LogTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1LogTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_log_trigger_dict = io_argoproj_events_v1alpha1_log_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1LogTrigger from a dict +io_argoproj_events_v1alpha1_log_trigger_form_dict = io_argoproj_events_v1alpha1_log_trigger.from_dict(io_argoproj_events_v1alpha1_log_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md index 5c9b08b700d9..044e5e08bc32 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] @@ -9,12 +10,28 @@ Name | Type | Description | Notes **connection_backoff** | 
[**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **topic** | **str** | | [optional] **url** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1MQTTEventSource from a JSON string +io_argoproj_events_v1alpha1_mqtt_event_source_instance = IoArgoprojEventsV1alpha1MQTTEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1MQTTEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_mqtt_event_source_dict = io_argoproj_events_v1alpha1_mqtt_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1MQTTEventSource from a dict +io_argoproj_events_v1alpha1_mqtt_event_source_form_dict = io_argoproj_events_v1alpha1_mqtt_event_source.from_dict(io_argoproj_events_v1alpha1_mqtt_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Metadata.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Metadata.md index b7c578ba87af..da594686904e 100644 --- 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Metadata.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Metadata.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**annotations** | **{str: (str,)}** | | [optional] -**labels** | **{str: (str,)}** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**annotations** | **Dict[str, str]** | | [optional] +**labels** | **Dict[str, str]** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Metadata from a JSON string +io_argoproj_events_v1alpha1_metadata_instance = IoArgoprojEventsV1alpha1Metadata.from_json(json) +# print the JSON string representation of the object +print(io_argoproj_events_v1alpha1_metadata_instance.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_metadata_dict = io_argoproj_events_v1alpha1_metadata_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Metadata from a dict +io_argoproj_events_v1alpha1_metadata_form_dict = IoArgoprojEventsV1alpha1Metadata.from_dict(io_argoproj_events_v1alpha1_metadata_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md index 12620996759a..ddc7e75f5238 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ |
------------- | ------------- | ------------- **basic** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] **credential** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **nkey** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1NATSAuth from a JSON string +io_argoproj_events_v1alpha1_nats_auth_instance = IoArgoprojEventsV1alpha1NATSAuth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1NATSAuth.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_nats_auth_dict = io_argoproj_events_v1alpha1_nats_auth_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1NATSAuth from a dict +io_argoproj_events_v1alpha1_nats_auth_form_dict = io_argoproj_events_v1alpha1_nats_auth.from_dict(io_argoproj_events_v1alpha1_nats_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md index 7b3ee4432c5d..ecc1a6bba889 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md @@ -2,18 +2,35 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- 
**auth** | [**IoArgoprojEventsV1alpha1NATSAuth**](IoArgoprojEventsV1alpha1NATSAuth.md) | | [optional] **connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **subject** | **str** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **url** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1NATSEventsSource from a JSON string +io_argoproj_events_v1alpha1_nats_events_source_instance = IoArgoprojEventsV1alpha1NATSEventsSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1NATSEventsSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_nats_events_source_dict = io_argoproj_events_v1alpha1_nats_events_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1NATSEventsSource from a dict +io_argoproj_events_v1alpha1_nats_events_source_form_dict = io_argoproj_events_v1alpha1_nats_events_source.from_dict(io_argoproj_events_v1alpha1_nats_events_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md index 9259dfaa33b9..d09eef84c0c4 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md @@ -3,15 +3,32 @@ NATSTrigger refers to the specification of the NATS trigger. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] **subject** | **str** | Name of the subject to put message on. | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **url** | **str** | URL of the NATS cluster. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1NATSTrigger from a JSON string +io_argoproj_events_v1alpha1_nats_trigger_instance = IoArgoprojEventsV1alpha1NATSTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1NATSTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_nats_trigger_dict = io_argoproj_events_v1alpha1_nats_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1NATSTrigger from a dict +io_argoproj_events_v1alpha1_nats_trigger_form_dict = io_argoproj_events_v1alpha1_nats_trigger.from_dict(io_argoproj_events_v1alpha1_nats_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md index 9306f47b5dcb..3601e5e448d4 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **channel** | **str** | | [optional] @@ -9,11 +10,27 @@ Name | Type | Description | Notes **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **host_address** | **str** | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | 
[optional] +**metadata** | **Dict[str, str]** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **topic** | **str** | Topic to subscribe to. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1NSQEventSource from a JSON string +io_argoproj_events_v1alpha1_nsq_event_source_instance = IoArgoprojEventsV1alpha1NSQEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1NSQEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_nsq_event_source_dict = io_argoproj_events_v1alpha1_nsq_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1NSQEventSource from a dict +io_argoproj_events_v1alpha1_nsq_event_source_form_dict = io_argoproj_events_v1alpha1_nsq_event_source.from_dict(io_argoproj_events_v1alpha1_nsq_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md index dec3f6409e66..fcb0eba0423d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md @@ -3,17 +3,34 @@ OpenWhiskTrigger refers to the specification of the OpenWhisk trigger. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **action_name** | **str** | Name of the action/function. | [optional] **auth_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **host** | **str** | Host URL of the OpenWhisk. | [optional] **namespace** | **str** | Namespace for the action. Defaults to \"_\". +optional. | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] **version** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1OpenWhiskTrigger from a JSON string +io_argoproj_events_v1alpha1_open_whisk_trigger_instance = IoArgoprojEventsV1alpha1OpenWhiskTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1OpenWhiskTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_open_whisk_trigger_dict = io_argoproj_events_v1alpha1_open_whisk_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1OpenWhiskTrigger from a dict +io_argoproj_events_v1alpha1_open_whisk_trigger_form_dict = io_argoproj_events_v1alpha1_open_whisk_trigger.from_dict(io_argoproj_events_v1alpha1_open_whisk_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md index 4205b837d98f..7f0ce76f2506 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**names** | **[str]** | | [optional] +**names** | **List[str]** | | [optional] **owner** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name 
can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1OwnedRepositories from a JSON string +io_argoproj_events_v1alpha1_owned_repositories_instance = IoArgoprojEventsV1alpha1OwnedRepositories.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1OwnedRepositories.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_owned_repositories_dict = io_argoproj_events_v1alpha1_owned_repositories_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1OwnedRepositories from a dict +io_argoproj_events_v1alpha1_owned_repositories_form_dict = io_argoproj_events_v1alpha1_owned_repositories.from_dict(io_argoproj_events_v1alpha1_owned_repositories_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PayloadField.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PayloadField.md index 52c63a8edead..430cc4e48760 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PayloadField.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PayloadField.md @@ -3,12 +3,29 @@ PayloadField binds a value at path within the event payload against a name. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name acts as key that holds the value at the path. | [optional] **path** | **str** | Path is the JSONPath of the event's (JSON decoded) data key Path is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. 
The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1PayloadField from a JSON string +io_argoproj_events_v1alpha1_payload_field_instance = IoArgoprojEventsV1alpha1PayloadField.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1PayloadField.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_payload_field_dict = io_argoproj_events_v1alpha1_payload_field_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1PayloadField from a dict +io_argoproj_events_v1alpha1_payload_field_form_dict = io_argoproj_events_v1alpha1_payload_field.from_dict(io_argoproj_events_v1alpha1_payload_field_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md index e5608ed945e5..57b85da59b11 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md @@ -3,19 +3,36 @@ PubSubEventSource refers to event-source for GCP PubSub related events. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **credential_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **delete_subscription_on_finish** | **bool** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **project_id** | **str** | | [optional] **subscription_id** | **str** | | [optional] **topic** | **str** | | [optional] **topic_project_id** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1PubSubEventSource from a JSON string +io_argoproj_events_v1alpha1_pub_sub_event_source_instance = IoArgoprojEventsV1alpha1PubSubEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1PubSubEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_pub_sub_event_source_dict = io_argoproj_events_v1alpha1_pub_sub_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1PubSubEventSource from a dict +io_argoproj_events_v1alpha1_pub_sub_event_source_form_dict = io_argoproj_events_v1alpha1_pub_sub_event_source.from_dict(io_argoproj_events_v1alpha1_pub_sub_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md index 633cd68a9021..d9636f55c25c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md @@ -2,24 +2,41 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**auth_athenz_params** | **{str: (str,)}** | | [optional] +**auth_athenz_params** | **Dict[str, str]** | | [optional] **auth_athenz_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **auth_token_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **tls_allow_insecure_connection** | **bool** | | [optional] **tls_trust_certs_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tls_validate_hostname** | **bool** | | [optional] -**topics** | **[str]** | | [optional] +**topics** | **List[str]** | | [optional] **type** | **str** | | [optional] **url** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of 
IoArgoprojEventsV1alpha1PulsarEventSource from a JSON string +io_argoproj_events_v1alpha1_pulsar_event_source_instance = IoArgoprojEventsV1alpha1PulsarEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1PulsarEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_pulsar_event_source_dict = io_argoproj_events_v1alpha1_pulsar_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1PulsarEventSource from a dict +io_argoproj_events_v1alpha1_pulsar_event_source_form_dict = io_argoproj_events_v1alpha1_pulsar_event_source.from_dict(io_argoproj_events_v1alpha1_pulsar_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md index e1a343d48bbb..ca16faebd98f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md @@ -3,22 +3,39 @@ PulsarTrigger refers to the specification of the Pulsar trigger. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**auth_athenz_params** | **{str: (str,)}** | | [optional] +**auth_athenz_params** | **Dict[str, str]** | | [optional] **auth_athenz_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **auth_token_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. 
| [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] +**payload** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **tls_allow_insecure_connection** | **bool** | | [optional] **tls_trust_certs_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tls_validate_hostname** | **bool** | | [optional] **topic** | **str** | | [optional] **url** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1PulsarTrigger from a JSON string +io_argoproj_events_v1alpha1_pulsar_trigger_instance = IoArgoprojEventsV1alpha1PulsarTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1PulsarTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_pulsar_trigger_dict = io_argoproj_events_v1alpha1_pulsar_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1PulsarTrigger from a dict +io_argoproj_events_v1alpha1_pulsar_trigger_form_dict = 
IoArgoprojEventsV1alpha1PulsarTrigger.from_dict(io_argoproj_events_v1alpha1_pulsar_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RateLimit.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1RateLimit.md index 9c1da4ce387c..cee5ef806f62 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RateLimit.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1RateLimit.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **requests_per_unit** | **int** | | [optional] **unit** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1RateLimit from a JSON string +io_argoproj_events_v1alpha1_rate_limit_instance = IoArgoprojEventsV1alpha1RateLimit.from_json(json) +# print the JSON string representation of the object +print(io_argoproj_events_v1alpha1_rate_limit_instance.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_rate_limit_dict = io_argoproj_events_v1alpha1_rate_limit_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1RateLimit from a dict +io_argoproj_events_v1alpha1_rate_limit_form_dict = IoArgoprojEventsV1alpha1RateLimit.from_dict(io_argoproj_events_v1alpha1_rate_limit_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md index 7b0d7ef8c9a6..ddf1f1da4e67 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md @@ -2,20 +2,37 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**channels** | **[str]** | | [optional] +**channels** | **List[str]** | | [optional] **db** | **int** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **host_address** | **str** | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **namespace** | **str** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **username** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1RedisEventSource from a JSON string +io_argoproj_events_v1alpha1_redis_event_source_instance = IoArgoprojEventsV1alpha1RedisEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1RedisEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_redis_event_source_dict = io_argoproj_events_v1alpha1_redis_event_source_instance.to_dict() +# create an instance of 
IoArgoprojEventsV1alpha1RedisEventSource from a dict +io_argoproj_events_v1alpha1_redis_event_source_form_dict = io_argoproj_events_v1alpha1_redis_event_source.from_dict(io_argoproj_events_v1alpha1_redis_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md index 3200d0d99e21..fdac76643997 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **consumer_group** | **str** | | [optional] @@ -9,13 +10,29 @@ Name | Type | Description | Notes **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **host_address** | **str** | | [optional] **max_msg_count_per_read** | **int** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**streams** | **[str]** | Streams to look for entries. XREADGROUP is used on all streams using a single consumer group. | [optional] +**streams** | **List[str]** | Streams to look for entries. XREADGROUP is used on all streams using a single consumer group. 
| [optional] **tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] **username** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_redis_stream_event_source import IoArgoprojEventsV1alpha1RedisStreamEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1RedisStreamEventSource from a JSON string +io_argoproj_events_v1alpha1_redis_stream_event_source_instance = IoArgoprojEventsV1alpha1RedisStreamEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1RedisStreamEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_redis_stream_event_source_dict = io_argoproj_events_v1alpha1_redis_stream_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1RedisStreamEventSource from a dict +io_argoproj_events_v1alpha1_redis_stream_event_source_form_dict = io_argoproj_events_v1alpha1_redis_stream_event_source.from_dict(io_argoproj_events_v1alpha1_redis_stream_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Resource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Resource.md index 45a4810d75ee..0482d86dd67b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Resource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Resource.md @@ -3,11 +3,28 @@ Resource represent arbitrary structured data. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**value** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**value** | **bytearray** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_resource import IoArgoprojEventsV1alpha1Resource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Resource from a JSON string +io_argoproj_events_v1alpha1_resource_instance = IoArgoprojEventsV1alpha1Resource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Resource.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_resource_dict = io_argoproj_events_v1alpha1_resource_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Resource from a dict +io_argoproj_events_v1alpha1_resource_form_dict = io_argoproj_events_v1alpha1_resource.from_dict(io_argoproj_events_v1alpha1_resource_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md index 77312a5274b9..ef356e9de931 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md @@ -3,15 +3,32 @@ ResourceEventSource refers to a event-source for K8s resource related events. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**event_types** | **[str]** | EventTypes is the list of event type to watch. 
Possible values are - ADD, UPDATE and DELETE. | [optional] +**event_types** | **List[str]** | EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE. | [optional] **filter** | [**IoArgoprojEventsV1alpha1ResourceFilter**](IoArgoprojEventsV1alpha1ResourceFilter.md) | | [optional] **group_version_resource** | [**GroupVersionResource**](GroupVersionResource.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ResourceEventSource from a JSON string +io_argoproj_events_v1alpha1_resource_event_source_instance = IoArgoprojEventsV1alpha1ResourceEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ResourceEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_resource_event_source_dict = io_argoproj_events_v1alpha1_resource_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ResourceEventSource from a dict +io_argoproj_events_v1alpha1_resource_event_source_form_dict = io_argoproj_events_v1alpha1_resource_event_source.from_dict(io_argoproj_events_v1alpha1_resource_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md index 
f9f1ccec552b..a14a0fd393dc 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **after_start** | **bool** | | [optional] **created_by** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] -**fields** | [**[IoArgoprojEventsV1alpha1Selector]**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] -**labels** | [**[IoArgoprojEventsV1alpha1Selector]**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] +**fields** | [**List[IoArgoprojEventsV1alpha1Selector]**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] +**labels** | [**List[IoArgoprojEventsV1alpha1Selector]**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] **prefix** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ResourceFilter from a JSON string +io_argoproj_events_v1alpha1_resource_filter_instance = IoArgoprojEventsV1alpha1ResourceFilter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ResourceFilter.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_resource_filter_dict = io_argoproj_events_v1alpha1_resource_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ResourceFilter from a dict +io_argoproj_events_v1alpha1_resource_filter_form_dict = 
io_argoproj_events_v1alpha1_resource_filter.from_dict(io_argoproj_events_v1alpha1_resource_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md index d55a738e5a70..9f8a0c1c0289 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md @@ -2,20 +2,37 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **bucket** | [**IoArgoprojEventsV1alpha1S3Bucket**](IoArgoprojEventsV1alpha1S3Bucket.md) | | [optional] **ca_certificate** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **endpoint** | **str** | | [optional] -**events** | **[str]** | | [optional] +**events** | **List[str]** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1S3Filter**](IoArgoprojEventsV1alpha1S3Filter.md) | | [optional] **insecure** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **region** | **str** | | [optional] **secret_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1S3Artifact from a JSON string +io_argoproj_events_v1alpha1_s3_artifact_instance = IoArgoprojEventsV1alpha1S3Artifact.from_json(json) +# 
print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1S3Artifact.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_s3_artifact_dict = io_argoproj_events_v1alpha1_s3_artifact_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1S3Artifact from a dict +io_argoproj_events_v1alpha1_s3_artifact_form_dict = io_argoproj_events_v1alpha1_s3_artifact.from_dict(io_argoproj_events_v1alpha1_s3_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md index a1571f97912c..9dd02c803dfb 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | | [optional] **name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1S3Bucket from a JSON string +io_argoproj_events_v1alpha1_s3_bucket_instance = IoArgoprojEventsV1alpha1S3Bucket.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1S3Bucket.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_s3_bucket_dict = io_argoproj_events_v1alpha1_s3_bucket_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1S3Bucket from a dict 
+io_argoproj_events_v1alpha1_s3_bucket_form_dict = io_argoproj_events_v1alpha1_s3_bucket.from_dict(io_argoproj_events_v1alpha1_s3_bucket_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Filter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Filter.md index c9254327b1e6..b78a0cabcce5 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Filter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Filter.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **prefix** | **str** | | [optional] **suffix** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1S3Filter from a JSON string +io_argoproj_events_v1alpha1_s3_filter_instance = IoArgoprojEventsV1alpha1S3Filter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1S3Filter.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_s3_filter_dict = io_argoproj_events_v1alpha1_s3_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1S3Filter from a dict +io_argoproj_events_v1alpha1_s3_filter_form_dict = io_argoproj_events_v1alpha1_s3_filter.from_dict(io_argoproj_events_v1alpha1_s3_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md index 1924a4e99c90..8bd2c9bf9146 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **mechanism** | **str** | | [optional] **password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **user_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SASLConfig from a JSON string +io_argoproj_events_v1alpha1_sasl_config_instance = IoArgoprojEventsV1alpha1SASLConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SASLConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_sasl_config_dict = io_argoproj_events_v1alpha1_sasl_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SASLConfig from a dict +io_argoproj_events_v1alpha1_sasl_config_form_dict = io_argoproj_events_v1alpha1_sasl_config.from_dict(io_argoproj_events_v1alpha1_sasl_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md index f6bdf25a6782..b555ce48ad59 100644 
--- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md @@ -3,19 +3,36 @@ SFTPEventSource describes an event-source for sftp related events. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **address** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **event_type** | **str** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **poll_interval_duration** | **str** | | [optional] **ssh_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **watch_path_config** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SFTPEventSource from a JSON string +io_argoproj_events_v1alpha1_sftp_event_source_instance = IoArgoprojEventsV1alpha1SFTPEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SFTPEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_sftp_event_source_dict = io_argoproj_events_v1alpha1_sftp_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SFTPEventSource from a 
dict +io_argoproj_events_v1alpha1_sftp_event_source_form_dict = io_argoproj_events_v1alpha1_sftp_event_source.from_dict(io_argoproj_events_v1alpha1_sftp_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md index b0290d3e73f3..f84bb2b0aa82 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md @@ -2,20 +2,37 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **endpoint** | **str** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **region** | **str** | | [optional] **role_arn** | **str** | | [optional] **secret_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **topic_arn** | **str** | | [optional] **validate_signature** | **bool** | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SNSEventSource from a JSON string +io_argoproj_events_v1alpha1_sns_event_source_instance = 
IoArgoprojEventsV1alpha1SNSEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SNSEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_sns_event_source_dict = io_argoproj_events_v1alpha1_sns_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SNSEventSource from a dict +io_argoproj_events_v1alpha1_sns_event_source_form_dict = io_argoproj_events_v1alpha1_sns_event_source.from_dict(io_argoproj_events_v1alpha1_sns_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md index fe350f9f4a95..7a0556749209 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -9,7 +10,7 @@ Name | Type | Description | Notes **endpoint** | **str** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **queue** | **str** | | [optional] **queue_account_id** | **str** | | [optional] **region** | **str** | | [optional] @@ -17,8 +18,24 @@ Name | Type | Description | Notes **secret_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **session_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **wait_time_seconds** | **str** | WaitTimeSeconds is The 
duration (in seconds) for which the call waits for a message to arrive in the queue before returning. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SQSEventSource from a JSON string +io_argoproj_events_v1alpha1_sqs_event_source_instance = IoArgoprojEventsV1alpha1SQSEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SQSEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_sqs_event_source_dict = io_argoproj_events_v1alpha1_sqs_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SQSEventSource from a dict +io_argoproj_events_v1alpha1_sqs_event_source_form_dict = io_argoproj_events_v1alpha1_sqs_event_source.from_dict(io_argoproj_events_v1alpha1_sqs_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md index 6e148cf15e0a..e4905f4f5be2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] **schema_id** | **int** | | [optional] **url** | **str** | 
Schema Registry URL. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SchemaRegistryConfig from a JSON string +io_argoproj_events_v1alpha1_schema_registry_config_instance = IoArgoprojEventsV1alpha1SchemaRegistryConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SchemaRegistryConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_schema_registry_config_dict = io_argoproj_events_v1alpha1_schema_registry_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SchemaRegistryConfig from a dict +io_argoproj_events_v1alpha1_schema_registry_config_form_dict = io_argoproj_events_v1alpha1_schema_registry_config.from_dict(io_argoproj_events_v1alpha1_schema_registry_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md index e71ac03b86d0..e5df79d44536 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **value_from** | [**IoArgoprojEventsV1alpha1ValueFromSource**](IoArgoprojEventsV1alpha1ValueFromSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, 
float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SecureHeader from a JSON string +io_argoproj_events_v1alpha1_secure_header_instance = IoArgoprojEventsV1alpha1SecureHeader.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SecureHeader.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_secure_header_dict = io_argoproj_events_v1alpha1_secure_header_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SecureHeader from a dict +io_argoproj_events_v1alpha1_secure_header_form_dict = io_argoproj_events_v1alpha1_secure_header.from_dict(io_argoproj_events_v1alpha1_secure_header_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Selector.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Selector.md index 40dd74f867f5..80c65c73e45c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Selector.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Selector.md @@ -3,13 +3,30 @@ Selector represents conditional operation to select K8s objects. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | | [optional] **operation** | **str** | | [optional] **value** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Selector from a JSON string +io_argoproj_events_v1alpha1_selector_instance = IoArgoprojEventsV1alpha1Selector.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Selector.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_selector_dict = io_argoproj_events_v1alpha1_selector_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Selector from a dict +io_argoproj_events_v1alpha1_selector_form_dict = io_argoproj_events_v1alpha1_selector.from_dict(io_argoproj_events_v1alpha1_selector_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Sensor.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Sensor.md index 5821251facad..d583b1c9c08d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Sensor.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Sensor.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] **spec** | [**IoArgoprojEventsV1alpha1SensorSpec**](IoArgoprojEventsV1alpha1SensorSpec.md) | | [optional] **status** | 
[**IoArgoprojEventsV1alpha1SensorStatus**](IoArgoprojEventsV1alpha1SensorStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Sensor from a JSON string +io_argoproj_events_v1alpha1_sensor_instance = IoArgoprojEventsV1alpha1Sensor.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Sensor.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_sensor_dict = io_argoproj_events_v1alpha1_sensor_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Sensor from a dict +io_argoproj_events_v1alpha1_sensor_form_dict = io_argoproj_events_v1alpha1_sensor.from_dict(io_argoproj_events_v1alpha1_sensor_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorList.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorList.md index c0125f170b40..96390edaa283 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorList.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorList.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojEventsV1alpha1Sensor]**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**items** | [**List[IoArgoprojEventsV1alpha1Sensor]**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] **metadata** | [**ListMeta**](ListMeta.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, 
list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SensorList from a JSON string +io_argoproj_events_v1alpha1_sensor_list_instance = IoArgoprojEventsV1alpha1SensorList.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SensorList.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_sensor_list_dict = io_argoproj_events_v1alpha1_sensor_list_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SensorList from a dict +io_argoproj_events_v1alpha1_sensor_list_form_dict = io_argoproj_events_v1alpha1_sensor_list.from_dict(io_argoproj_events_v1alpha1_sensor_list_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md index 597a071caf9d..19ca7cbf46c6 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md @@ -2,18 +2,35 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**dependencies** | [**[IoArgoprojEventsV1alpha1EventDependency]**](IoArgoprojEventsV1alpha1EventDependency.md) | Dependencies is a list of the events that this sensor is dependent on. | [optional] +**dependencies** | [**List[IoArgoprojEventsV1alpha1EventDependency]**](IoArgoprojEventsV1alpha1EventDependency.md) | Dependencies is a list of the events that this sensor is dependent on. 
| [optional] **error_on_failed_round** | **bool** | ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed. | [optional] **event_bus_name** | **str** | | [optional] -**logging_fields** | **{str: (str,)}** | | [optional] +**logging_fields** | **Dict[str, str]** | | [optional] **replicas** | **int** | | [optional] **revision_history_limit** | **int** | | [optional] **template** | [**IoArgoprojEventsV1alpha1Template**](IoArgoprojEventsV1alpha1Template.md) | | [optional] -**triggers** | [**[IoArgoprojEventsV1alpha1Trigger]**](IoArgoprojEventsV1alpha1Trigger.md) | Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**triggers** | [**List[IoArgoprojEventsV1alpha1Trigger]**](IoArgoprojEventsV1alpha1Trigger.md) | Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor. 
| [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SensorSpec from a JSON string +io_argoproj_events_v1alpha1_sensor_spec_instance = IoArgoprojEventsV1alpha1SensorSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SensorSpec.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_sensor_spec_dict = io_argoproj_events_v1alpha1_sensor_spec_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SensorSpec from a dict +io_argoproj_events_v1alpha1_sensor_spec_form_dict = io_argoproj_events_v1alpha1_sensor_spec.from_dict(io_argoproj_events_v1alpha1_sensor_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md index 4a0ffb5ac287..cadaa587ac87 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md @@ -3,11 +3,28 @@ SensorStatus contains information about the status of a sensor. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **status** | [**IoArgoprojEventsV1alpha1Status**](IoArgoprojEventsV1alpha1Status.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SensorStatus from a JSON string +io_argoproj_events_v1alpha1_sensor_status_instance = IoArgoprojEventsV1alpha1SensorStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SensorStatus.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_sensor_status_dict = io_argoproj_events_v1alpha1_sensor_status_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SensorStatus from a dict +io_argoproj_events_v1alpha1_sensor_status_form_dict = io_argoproj_events_v1alpha1_sensor_status.from_dict(io_argoproj_events_v1alpha1_sensor_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Service.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Service.md index 172180919f34..31fc36ebe450 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Service.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Service.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cluster_ip** | **str** | | [optional] -**ports** | [**[ServicePort]**](ServicePort.md) | | [optional] -**any string name** | **bool, date, datetime, 
dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**ports** | [**List[ServicePort]**](ServicePort.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Service from a JSON string +io_argoproj_events_v1alpha1_service_instance = IoArgoprojEventsV1alpha1Service.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Service.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_service_dict = io_argoproj_events_v1alpha1_service_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Service from a dict +io_argoproj_events_v1alpha1_service_form_dict = io_argoproj_events_v1alpha1_service.from_dict(io_argoproj_events_v1alpha1_service_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md index a2d759744472..5a597d167b81 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **signing_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] 
**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SlackEventSource from a JSON string +io_argoproj_events_v1alpha1_slack_event_source_instance = IoArgoprojEventsV1alpha1SlackEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SlackEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_slack_event_source_dict = io_argoproj_events_v1alpha1_slack_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SlackEventSource from a dict +io_argoproj_events_v1alpha1_slack_event_source_form_dict = io_argoproj_events_v1alpha1_slack_event_source.from_dict(io_argoproj_events_v1alpha1_slack_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackSender.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackSender.md index f6b117aa48ca..ae130977e334 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackSender.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackSender.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **icon** | **str** | | [optional] **username** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name 
can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SlackSender from a JSON string +io_argoproj_events_v1alpha1_slack_sender_instance = IoArgoprojEventsV1alpha1SlackSender.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SlackSender.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_slack_sender_dict = io_argoproj_events_v1alpha1_slack_sender_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SlackSender from a dict +io_argoproj_events_v1alpha1_slack_sender_form_dict = io_argoproj_events_v1alpha1_slack_sender.from_dict(io_argoproj_events_v1alpha1_slack_sender_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackThread.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackThread.md index ab92f1436bfa..e1fd58edfa7b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackThread.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackThread.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **broadcast_message_to_channel** | **bool** | | [optional] **message_aggregation_key** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread + +# TODO update the JSON string below +json = 
"{}" +# create an instance of IoArgoprojEventsV1alpha1SlackThread from a JSON string +io_argoproj_events_v1alpha1_slack_thread_instance = IoArgoprojEventsV1alpha1SlackThread.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SlackThread.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_slack_thread_dict = io_argoproj_events_v1alpha1_slack_thread_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SlackThread from a dict +io_argoproj_events_v1alpha1_slack_thread_form_dict = io_argoproj_events_v1alpha1_slack_thread.from_dict(io_argoproj_events_v1alpha1_slack_thread_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md index dd7319e691a2..5f7e12691b5e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md @@ -3,18 +3,35 @@ SlackTrigger refers to the specification of the slack notification trigger. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **attachments** | **str** | | [optional] **blocks** | **str** | | [optional] **channel** | **str** | | [optional] **message** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] **sender** | [**IoArgoprojEventsV1alpha1SlackSender**](IoArgoprojEventsV1alpha1SlackSender.md) | | [optional] **slack_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **thread** | [**IoArgoprojEventsV1alpha1SlackThread**](IoArgoprojEventsV1alpha1SlackThread.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1SlackTrigger from a JSON string +io_argoproj_events_v1alpha1_slack_trigger_instance = IoArgoprojEventsV1alpha1SlackTrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1SlackTrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_slack_trigger_dict = io_argoproj_events_v1alpha1_slack_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1SlackTrigger from a dict +io_argoproj_events_v1alpha1_slack_trigger_form_dict = io_argoproj_events_v1alpha1_slack_trigger.from_dict(io_argoproj_events_v1alpha1_slack_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md index 2d67821586bc..459a22f00cfc 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **live_object** | **bool** | | [optional] **operation** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved K8s trigger object. | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved K8s trigger object. | [optional] **patch_strategy** | **str** | | [optional] **source** | [**IoArgoprojEventsV1alpha1ArtifactLocation**](IoArgoprojEventsV1alpha1ArtifactLocation.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_standard_k8_s_trigger import IoArgoprojEventsV1alpha1StandardK8STrigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1StandardK8STrigger from a JSON string +io_argoproj_events_v1alpha1_standard_k8_s_trigger_instance = IoArgoprojEventsV1alpha1StandardK8STrigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1StandardK8STrigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_standard_k8_s_trigger_dict = 
io_argoproj_events_v1alpha1_standard_k8_s_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1StandardK8STrigger from a dict +io_argoproj_events_v1alpha1_standard_k8_s_trigger_form_dict = io_argoproj_events_v1alpha1_standard_k8_s_trigger.from_dict(io_argoproj_events_v1alpha1_standard_k8_s_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Status.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Status.md index 0c38640bbc90..c6a680212454 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Status.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Status.md @@ -3,11 +3,28 @@ Status is a common structure which can be used for Status field. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**conditions** | [**[IoArgoprojEventsV1alpha1Condition]**](IoArgoprojEventsV1alpha1Condition.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**conditions** | [**List[IoArgoprojEventsV1alpha1Condition]**](IoArgoprojEventsV1alpha1Condition.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Status from a JSON string +io_argoproj_events_v1alpha1_status_instance = IoArgoprojEventsV1alpha1Status.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Status.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_status_dict = io_argoproj_events_v1alpha1_status_instance.to_dict() +# create an instance of 
IoArgoprojEventsV1alpha1Status from a dict +io_argoproj_events_v1alpha1_status_form_dict = io_argoproj_events_v1alpha1_status.from_dict(io_argoproj_events_v1alpha1_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md index 7a24c62f85aa..d169e5f915f0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**allow** | **[int]** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**allow** | **List[int]** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1StatusPolicy from a JSON string +io_argoproj_events_v1alpha1_status_policy_instance = IoArgoprojEventsV1alpha1StatusPolicy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1StatusPolicy.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_status_policy_dict = io_argoproj_events_v1alpha1_status_policy_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1StatusPolicy from a dict +io_argoproj_events_v1alpha1_status_policy_form_dict = io_argoproj_events_v1alpha1_status_policy.from_dict(io_argoproj_events_v1alpha1_status_policy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md index 7ecb7ca43a1d..701b006eed9e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md @@ -2,19 +2,36 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **api_url** | **str** | APIURL is the url of the storagegrid api. | [optional] **auth_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **bucket** | **str** | Name of the bucket to register notifications for. | [optional] -**events** | **[str]** | | [optional] +**events** | **List[str]** | | [optional] **filter** | [**IoArgoprojEventsV1alpha1StorageGridFilter**](IoArgoprojEventsV1alpha1StorageGridFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **region** | **str** | | [optional] **topic_arn** | **str** | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1StorageGridEventSource from a JSON string +io_argoproj_events_v1alpha1_storage_grid_event_source_instance = IoArgoprojEventsV1alpha1StorageGridEventSource.from_json(json) +# print the JSON string representation of the object 
+print(IoArgoprojEventsV1alpha1StorageGridEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_storage_grid_event_source_dict = io_argoproj_events_v1alpha1_storage_grid_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1StorageGridEventSource from a dict +io_argoproj_events_v1alpha1_storage_grid_event_source_form_dict = io_argoproj_events_v1alpha1_storage_grid_event_source.from_dict(io_argoproj_events_v1alpha1_storage_grid_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md index 4427ab20b072..ded5168650d2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **prefix** | **str** | | [optional] **suffix** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_storage_grid_filter import IoArgoprojEventsV1alpha1StorageGridFilter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1StorageGridFilter from a JSON string +io_argoproj_events_v1alpha1_storage_grid_filter_instance = IoArgoprojEventsV1alpha1StorageGridFilter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1StorageGridFilter.to_json()) + +# convert the object into a dict 
+io_argoproj_events_v1alpha1_storage_grid_filter_dict = io_argoproj_events_v1alpha1_storage_grid_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1StorageGridFilter from a dict +io_argoproj_events_v1alpha1_storage_grid_filter_form_dict = io_argoproj_events_v1alpha1_storage_grid_filter.from_dict(io_argoproj_events_v1alpha1_storage_grid_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md index b23eb28bc1a6..2f472c99d454 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **api_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **create_webhook** | **bool** | | [optional] -**event_filter** | **[str]** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**event_filter** | **List[str]** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1StripeEventSource from a JSON string +io_argoproj_events_v1alpha1_stripe_event_source_instance = 
IoArgoprojEventsV1alpha1StripeEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1StripeEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_stripe_event_source_dict = io_argoproj_events_v1alpha1_stripe_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1StripeEventSource from a dict +io_argoproj_events_v1alpha1_stripe_event_source_form_dict = io_argoproj_events_v1alpha1_stripe_event_source.from_dict(io_argoproj_events_v1alpha1_stripe_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md index 20810d5d7563..5c470a53bc1f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md @@ -3,14 +3,31 @@ TLSConfig refers to TLS configuration for a client. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **ca_cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **client_cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **client_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **insecure_skip_verify** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1TLSConfig from a JSON string +io_argoproj_events_v1alpha1_tls_config_instance = IoArgoprojEventsV1alpha1TLSConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1TLSConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_tls_config_dict = io_argoproj_events_v1alpha1_tls_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1TLSConfig from a dict +io_argoproj_events_v1alpha1_tls_config_form_dict = io_argoproj_events_v1alpha1_tls_config.from_dict(io_argoproj_events_v1alpha1_tls_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Template.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Template.md index cbdbe9bc7ad7..5787ad2ea06e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Template.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Template.md @@ -2,21 +2,38 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- 
| ------------- **affinity** | [**Affinity**](Affinity.md) | | [optional] **container** | [**Container**](Container.md) | | [optional] -**image_pull_secrets** | [**[LocalObjectReference]**](LocalObjectReference.md) | | [optional] +**image_pull_secrets** | [**List[LocalObjectReference]**](LocalObjectReference.md) | | [optional] **metadata** | [**IoArgoprojEventsV1alpha1Metadata**](IoArgoprojEventsV1alpha1Metadata.md) | | [optional] -**node_selector** | **{str: (str,)}** | | [optional] +**node_selector** | **Dict[str, str]** | | [optional] **priority** | **int** | | [optional] **priority_class_name** | **str** | | [optional] **security_context** | [**PodSecurityContext**](PodSecurityContext.md) | | [optional] **service_account_name** | **str** | | [optional] -**tolerations** | [**[Toleration]**](Toleration.md) | | [optional] -**volumes** | [**[Volume]**](Volume.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**tolerations** | [**List[Toleration]**](Toleration.md) | | [optional] +**volumes** | [**List[Volume]**](Volume.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Template from a JSON string +io_argoproj_events_v1alpha1_template_instance = IoArgoprojEventsV1alpha1Template.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Template.to_json()) +# convert the object into a dict +io_argoproj_events_v1alpha1_template_dict = io_argoproj_events_v1alpha1_template_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Template from a dict +io_argoproj_events_v1alpha1_template_form_dict = 
io_argoproj_events_v1alpha1_template.from_dict(io_argoproj_events_v1alpha1_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md index b16640181dcc..42d1ae6a3aaf 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md @@ -3,12 +3,29 @@ TimeFilter describes a window in time. It filters out events that occur outside the time limits. In other words, only events that occur after Start and before Stop will pass this filter. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **start** | **str** | Start is the beginning of a time window in UTC. Before this time, events for this dependency are ignored. Format is hh:mm:ss. | [optional] **stop** | **str** | Stop is the end of a time window in UTC. After or equal to this time, events for this dependency are ignored and Format is hh:mm:ss. If it is smaller than Start, it is treated as next day of Start (e.g.: 22:00:00-01:00:00 means 22:00:00-25:00:00). 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_time_filter import IoArgoprojEventsV1alpha1TimeFilter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1TimeFilter from a JSON string +io_argoproj_events_v1alpha1_time_filter_instance = IoArgoprojEventsV1alpha1TimeFilter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1TimeFilter.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_time_filter_dict = io_argoproj_events_v1alpha1_time_filter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1TimeFilter from a dict +io_argoproj_events_v1alpha1_time_filter_form_dict = io_argoproj_events_v1alpha1_time_filter.from_dict(io_argoproj_events_v1alpha1_time_filter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Trigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Trigger.md index 958a9dd01cf1..972b664c2a22 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Trigger.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Trigger.md @@ -2,16 +2,33 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **at_least_once** | **bool** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**parameters** | [**List[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] **policy** | 
[**IoArgoprojEventsV1alpha1TriggerPolicy**](IoArgoprojEventsV1alpha1TriggerPolicy.md) | | [optional] **rate_limit** | [**IoArgoprojEventsV1alpha1RateLimit**](IoArgoprojEventsV1alpha1RateLimit.md) | | [optional] **retry_strategy** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] **template** | [**IoArgoprojEventsV1alpha1TriggerTemplate**](IoArgoprojEventsV1alpha1TriggerTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1Trigger from a JSON string +io_argoproj_events_v1alpha1_trigger_instance = IoArgoprojEventsV1alpha1Trigger.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1Trigger.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_trigger_dict = io_argoproj_events_v1alpha1_trigger_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1Trigger from a dict +io_argoproj_events_v1alpha1_trigger_form_dict = io_argoproj_events_v1alpha1_trigger.from_dict(io_argoproj_events_v1alpha1_trigger_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md index 533abe24f57c..8af7373bbf5c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | 
------------- | ------------- | ------------- **dest** | **str** | Dest is the JSONPath of a resource key. A path is a series of keys separated by a dot. The colon character can be escaped with '.' The -1 key can be used to append a value to an existing array. See https://github.com/tidwall/sjson#path-syntax for more information about how this is used. | [optional] **operation** | **str** | Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it. | [optional] **src** | [**IoArgoprojEventsV1alpha1TriggerParameterSource**](IoArgoprojEventsV1alpha1TriggerParameterSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1TriggerParameter from a JSON string +io_argoproj_events_v1alpha1_trigger_parameter_instance = IoArgoprojEventsV1alpha1TriggerParameter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1TriggerParameter.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_trigger_parameter_dict = io_argoproj_events_v1alpha1_trigger_parameter_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1TriggerParameter from a dict +io_argoproj_events_v1alpha1_trigger_parameter_form_dict = io_argoproj_events_v1alpha1_trigger_parameter.from_dict(io_argoproj_events_v1alpha1_trigger_parameter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md 
b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md index 92f928eba30b..5d3ca8bacd32 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **context_key** | **str** | ContextKey is the JSONPath of the event's (JSON decoded) context key ContextKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'. To access an array value use the index as the key. The dot and wildcard characters can be escaped with '\\\\'. See https://github.com/tidwall/gjson#path-syntax for more information on how to use this. | [optional] @@ -11,8 +12,24 @@ Name | Type | Description | Notes **dependency_name** | **str** | DependencyName refers to the name of the dependency. The event which is stored for this dependency is used as payload for the parameterization. Make sure to refer to one of the dependencies you have defined under Dependencies list. | [optional] **use_raw_data** | **bool** | | [optional] **value** | **str** | Value is the default literal value to use for this parameter source This is only used if the DataKey is invalid. If the DataKey is invalid and this is not defined, this param source will produce an error. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1TriggerParameterSource from a JSON string +io_argoproj_events_v1alpha1_trigger_parameter_source_instance = IoArgoprojEventsV1alpha1TriggerParameterSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1TriggerParameterSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_trigger_parameter_source_dict = io_argoproj_events_v1alpha1_trigger_parameter_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1TriggerParameterSource from a dict +io_argoproj_events_v1alpha1_trigger_parameter_source_form_dict = io_argoproj_events_v1alpha1_trigger_parameter_source.from_dict(io_argoproj_events_v1alpha1_trigger_parameter_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md index 486b013ab84d..1b47a07395d0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **k8s** | [**IoArgoprojEventsV1alpha1K8SResourcePolicy**](IoArgoprojEventsV1alpha1K8SResourcePolicy.md) | | [optional] **status** | 
[**IoArgoprojEventsV1alpha1StatusPolicy**](IoArgoprojEventsV1alpha1StatusPolicy.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1TriggerPolicy from a JSON string +io_argoproj_events_v1alpha1_trigger_policy_instance = IoArgoprojEventsV1alpha1TriggerPolicy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1TriggerPolicy.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_trigger_policy_dict = io_argoproj_events_v1alpha1_trigger_policy_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1TriggerPolicy from a dict +io_argoproj_events_v1alpha1_trigger_policy_form_dict = io_argoproj_events_v1alpha1_trigger_policy.from_dict(io_argoproj_events_v1alpha1_trigger_policy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md index 8fa1fb8a116c..084bddf7d954 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md @@ -3,6 +3,7 @@ TriggerTemplate is the template that describes trigger specification. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **argo_workflow** | [**IoArgoprojEventsV1alpha1ArgoWorkflowTrigger**](IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md) | | [optional] @@ -10,7 +11,7 @@ Name | Type | Description | Notes **azure_event_hubs** | [**IoArgoprojEventsV1alpha1AzureEventHubsTrigger**](IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md) | | [optional] **azure_service_bus** | [**IoArgoprojEventsV1alpha1AzureServiceBusTrigger**](IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md) | | [optional] **conditions** | **str** | | [optional] -**conditions_reset** | [**[IoArgoprojEventsV1alpha1ConditionsResetCriteria]**](IoArgoprojEventsV1alpha1ConditionsResetCriteria.md) | | [optional] +**conditions_reset** | [**List[IoArgoprojEventsV1alpha1ConditionsResetCriteria]**](IoArgoprojEventsV1alpha1ConditionsResetCriteria.md) | | [optional] **custom** | [**IoArgoprojEventsV1alpha1CustomTrigger**](IoArgoprojEventsV1alpha1CustomTrigger.md) | | [optional] **email** | [**IoArgoprojEventsV1alpha1EmailTrigger**](IoArgoprojEventsV1alpha1EmailTrigger.md) | | [optional] **http** | [**IoArgoprojEventsV1alpha1HTTPTrigger**](IoArgoprojEventsV1alpha1HTTPTrigger.md) | | [optional] @@ -22,8 +23,24 @@ Name | Type | Description | Notes **open_whisk** | [**IoArgoprojEventsV1alpha1OpenWhiskTrigger**](IoArgoprojEventsV1alpha1OpenWhiskTrigger.md) | | [optional] **pulsar** | [**IoArgoprojEventsV1alpha1PulsarTrigger**](IoArgoprojEventsV1alpha1PulsarTrigger.md) | | [optional] **slack** | [**IoArgoprojEventsV1alpha1SlackTrigger**](IoArgoprojEventsV1alpha1SlackTrigger.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate + +# TODO update the JSON string 
below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1TriggerTemplate from a JSON string +io_argoproj_events_v1alpha1_trigger_template_instance = IoArgoprojEventsV1alpha1TriggerTemplate.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1TriggerTemplate.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_trigger_template_dict = io_argoproj_events_v1alpha1_trigger_template_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1TriggerTemplate from a dict +io_argoproj_events_v1alpha1_trigger_template_form_dict = io_argoproj_events_v1alpha1_trigger_template.from_dict(io_argoproj_events_v1alpha1_trigger_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md index 48dd7f10d23d..59583296cf53 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md @@ -3,12 +3,29 @@ URLArtifact contains information about an artifact at an http endpoint. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **path** | **str** | | [optional] **verify_cert** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_url_artifact import IoArgoprojEventsV1alpha1URLArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1URLArtifact from a JSON string +io_argoproj_events_v1alpha1_url_artifact_instance = IoArgoprojEventsV1alpha1URLArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1URLArtifact.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_url_artifact_dict = io_argoproj_events_v1alpha1_url_artifact_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1URLArtifact from a dict +io_argoproj_events_v1alpha1_url_artifact_form_dict = io_argoproj_events_v1alpha1_url_artifact.from_dict(io_argoproj_events_v1alpha1_url_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md index 6feb2846fb32..d57b6a157041 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map_key_ref** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] **secret_key_ref** | [**SecretKeySelector**](SecretKeySelector.md) 
| | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1ValueFromSource from a JSON string +io_argoproj_events_v1alpha1_value_from_source_instance = IoArgoprojEventsV1alpha1ValueFromSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1ValueFromSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_value_from_source_dict = io_argoproj_events_v1alpha1_value_from_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1ValueFromSource from a dict +io_argoproj_events_v1alpha1_value_from_source_form_dict = io_argoproj_events_v1alpha1_value_from_source.from_dict(io_argoproj_events_v1alpha1_value_from_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md index 9594f1962997..32c52bcbb84f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **directory** | **str** | | [optional] **path** | **str** | | [optional] **path_regexp** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | 
[optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1WatchPathConfig from a JSON string +io_argoproj_events_v1alpha1_watch_path_config_instance = IoArgoprojEventsV1alpha1WatchPathConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1WatchPathConfig.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_watch_path_config_dict = io_argoproj_events_v1alpha1_watch_path_config_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1WatchPathConfig from a dict +io_argoproj_events_v1alpha1_watch_path_config_form_dict = io_argoproj_events_v1alpha1_watch_path_config.from_dict(io_argoproj_events_v1alpha1_watch_path_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md index 4dfce684b23d..a646d3c8b25f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md @@ -2,19 +2,36 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auth_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **endpoint** | **str** | | [optional] **max_payload_size** | **str** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] +**metadata** | **Dict[str, str]** | | [optional] **method** | **str** | | [optional] **port** | **str** | Port on which HTTP server is listening for incoming events. 
| [optional] **server_cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **server_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **url** | **str** | URL is the url of the server. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1WebhookContext from a JSON string +io_argoproj_events_v1alpha1_webhook_context_instance = IoArgoprojEventsV1alpha1WebhookContext.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1WebhookContext.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_webhook_context_dict = io_argoproj_events_v1alpha1_webhook_context_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1WebhookContext from a dict +io_argoproj_events_v1alpha1_webhook_context_form_dict = io_argoproj_events_v1alpha1_webhook_context.from_dict(io_argoproj_events_v1alpha1_webhook_context_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md index be3c1b1659a2..8366ef211dcf 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md +++ b/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **filter** | 
[**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] **webhook_context** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojEventsV1alpha1WebhookEventSource from a JSON string +io_argoproj_events_v1alpha1_webhook_event_source_instance = IoArgoprojEventsV1alpha1WebhookEventSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojEventsV1alpha1WebhookEventSource.to_json()) + +# convert the object into a dict +io_argoproj_events_v1alpha1_webhook_event_source_dict = io_argoproj_events_v1alpha1_webhook_event_source_instance.to_dict() +# create an instance of IoArgoprojEventsV1alpha1WebhookEventSource from a dict +io_argoproj_events_v1alpha1_webhook_event_source_form_dict = io_argoproj_events_v1alpha1_webhook_event_source.from_dict(io_argoproj_events_v1alpha1_webhook_event_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArchiveStrategy.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArchiveStrategy.md index adba6c157cde..fdbed20ad4b3 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArchiveStrategy.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArchiveStrategy.md @@ -3,13 +3,30 @@ ArchiveStrategy describes how to archive files/directory when saving artifacts ## Properties + Name | Type | Description | 
Notes ------------ | ------------- | ------------- | ------------- -**_none** | **bool, date, datetime, dict, float, int, list, str, none_type** | NoneStrategy indicates to skip tar process and upload the files or directory tree as independent files. Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately. | [optional] +**var_none** | **object** | NoneStrategy indicates to skip tar process and upload the files or directory tree as independent files. Note that if the artifact is a directory, the artifact driver must support the ability to save/load the directory appropriately. | [optional] **tar** | [**IoArgoprojWorkflowV1alpha1TarStrategy**](IoArgoprojWorkflowV1alpha1TarStrategy.md) | | [optional] -**zip** | **bool, date, datetime, dict, float, int, list, str, none_type** | ZipStrategy will unzip zipped input artifacts | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**zip** | **object** | ZipStrategy will unzip zipped input artifacts | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArchiveStrategy from a JSON string +io_argoproj_workflow_v1alpha1_archive_strategy_instance = IoArgoprojWorkflowV1alpha1ArchiveStrategy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArchiveStrategy.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_archive_strategy_dict = io_argoproj_workflow_v1alpha1_archive_strategy_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArchiveStrategy from a dict +io_argoproj_workflow_v1alpha1_archive_strategy_form_dict = 
io_argoproj_workflow_v1alpha1_archive_strategy.from_dict(io_argoproj_workflow_v1alpha1_archive_strategy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Arguments.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Arguments.md index 8ba602295da9..575db9f62440 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Arguments.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Arguments.md @@ -3,12 +3,29 @@ Arguments to a template ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**artifacts** | [**[IoArgoprojWorkflowV1alpha1Artifact]**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts is the list of artifacts to pass to the template or workflow | [optional] -**parameters** | [**[IoArgoprojWorkflowV1alpha1Parameter]**](IoArgoprojWorkflowV1alpha1Parameter.md) | Parameters is the list of parameters to pass to the template or workflow | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**artifacts** | [**List[IoArgoprojWorkflowV1alpha1Artifact]**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts is the list of artifacts to pass to the template or workflow | [optional] +**parameters** | [**List[IoArgoprojWorkflowV1alpha1Parameter]**](IoArgoprojWorkflowV1alpha1Parameter.md) | Parameters is the list of parameters to pass to the template or workflow | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Arguments from a JSON string +io_argoproj_workflow_v1alpha1_arguments_instance = 
IoArgoprojWorkflowV1alpha1Arguments.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Arguments.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_arguments_dict = io_argoproj_workflow_v1alpha1_arguments_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Arguments from a dict +io_argoproj_workflow_v1alpha1_arguments_form_dict = io_argoproj_workflow_v1alpha1_arguments.from_dict(io_argoproj_workflow_v1alpha1_arguments_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md index 35ac7533d9ae..7d7392d1c289 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtGCStatus.md @@ -3,13 +3,30 @@ ArtGCStatus maintains state related to ArtifactGC ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **not_specified** | **bool** | if this is true, we already checked to see if we need to do it and we don't | [optional] -**pods_recouped** | **{str: (bool,)}** | have completed Pods been processed? (mapped by Pod name) used to prevent re-processing the Status of a Pod more than once | [optional] -**strategies_processed** | **{str: (bool,)}** | have Pods been started to perform this strategy? (enables us not to re-process what we've already done) | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**pods_recouped** | **Dict[str, bool]** | have completed Pods been processed? 
(mapped by Pod name) used to prevent re-processing the Status of a Pod more than once | [optional] +**strategies_processed** | **Dict[str, bool]** | have Pods been started to perform this strategy? (enables us not to re-process what we've already done) | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtGCStatus from a JSON string +io_argoproj_workflow_v1alpha1_art_gc_status_instance = IoArgoprojWorkflowV1alpha1ArtGCStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtGCStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_art_gc_status_dict = io_argoproj_workflow_v1alpha1_art_gc_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtGCStatus from a dict +io_argoproj_workflow_v1alpha1_art_gc_status_form_dict = io_argoproj_workflow_v1alpha1_art_gc_status.from_dict(io_argoproj_workflow_v1alpha1_art_gc_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md index 84af08865d9e..780756c1eb02 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Artifact.md @@ -3,16 +3,16 @@ Artifact indicates an artifact to place at a specified path ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | name of the artifact. must be unique within a template's inputs/outputs. 
| **archive** | [**IoArgoprojWorkflowV1alpha1ArchiveStrategy**](IoArgoprojWorkflowV1alpha1ArchiveStrategy.md) | | [optional] **archive_logs** | **bool** | ArchiveLogs indicates if the container logs should be archived | [optional] **artifact_gc** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] **azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] **deleted** | **bool** | Has this been deleted? | [optional] -**_from** | **str** | From allows an artifact to reference an artifact from a previous step | [optional] +**var_from** | **str** | From allows an artifact to reference an artifact from a previous step | [optional] **from_expression** | **str** | FromExpression, if defined, is evaluated to specify the value for the artifact | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] **git** | [**IoArgoprojWorkflowV1alpha1GitArtifact**](IoArgoprojWorkflowV1alpha1GitArtifact.md) | | [optional] @@ -20,6 +20,7 @@ Name | Type | Description | Notes **hdfs** | [**IoArgoprojWorkflowV1alpha1HDFSArtifact**](IoArgoprojWorkflowV1alpha1HDFSArtifact.md) | | [optional] **http** | [**IoArgoprojWorkflowV1alpha1HTTPArtifact**](IoArgoprojWorkflowV1alpha1HTTPArtifact.md) | | [optional] **mode** | **int** | mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts. | [optional] +**name** | **str** | name of the artifact. must be unique within a template's inputs/outputs. 
| **optional** | **bool** | Make Artifacts optional, if Artifacts doesn't generate or exist | [optional] **oss** | [**IoArgoprojWorkflowV1alpha1OSSArtifact**](IoArgoprojWorkflowV1alpha1OSSArtifact.md) | | [optional] **path** | **str** | Path is the container path to the artifact | [optional] @@ -27,8 +28,24 @@ Name | Type | Description | Notes **recurse_mode** | **bool** | If mode is set, apply the permission recursively into the artifact if it is a folder | [optional] **s3** | [**IoArgoprojWorkflowV1alpha1S3Artifact**](IoArgoprojWorkflowV1alpha1S3Artifact.md) | | [optional] **sub_path** | **str** | SubPath allows an artifact to be sourced from a subpath within the specified source | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact import IoArgoprojWorkflowV1alpha1Artifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Artifact from a JSON string +io_argoproj_workflow_v1alpha1_artifact_instance = IoArgoprojWorkflowV1alpha1Artifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Artifact.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_dict = io_argoproj_workflow_v1alpha1_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Artifact from a dict +io_argoproj_workflow_v1alpha1_artifact_form_dict = io_argoproj_workflow_v1alpha1_artifact.from_dict(io_argoproj_workflow_v1alpha1_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md index 38b6a55f9457..aa10ccae11cd 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGC.md @@ -3,13 +3,30 @@ ArtifactGC describes how to delete artifacts from completed Workflows - this is embedded into the WorkflowLevelArtifactGC, and also used for individual Artifacts to override that as needed ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **pod_metadata** | [**IoArgoprojWorkflowV1alpha1Metadata**](IoArgoprojWorkflowV1alpha1Metadata.md) | | [optional] **service_account_name** | **str** | ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion | [optional] **strategy** | **str** | Strategy is the strategy to use. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc import IoArgoprojWorkflowV1alpha1ArtifactGC + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactGC from a JSON string +io_argoproj_workflow_v1alpha1_artifact_gc_instance = IoArgoprojWorkflowV1alpha1ArtifactGC.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactGC.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_gc_dict = io_argoproj_workflow_v1alpha1_artifact_gc_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactGC from a dict +io_argoproj_workflow_v1alpha1_artifact_gc_form_dict = io_argoproj_workflow_v1alpha1_artifact_gc.from_dict(io_argoproj_workflow_v1alpha1_artifact_gc_dict) +``` [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md index f6a6a22eb920..dbcb719e7810 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCSpec.md @@ -3,11 +3,28 @@ ArtifactGCSpec specifies the Artifacts that need to be deleted ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**artifacts_by_node** | [**{str: (IoArgoprojWorkflowV1alpha1ArtifactNodeSpec,)}**](IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md) | ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**artifacts_by_node** | [**Dict[str, IoArgoprojWorkflowV1alpha1ArtifactNodeSpec]**](IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md) | ArtifactsByNode maps Node name to information pertaining to Artifacts on that Node | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc_spec import IoArgoprojWorkflowV1alpha1ArtifactGCSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactGCSpec from a JSON string +io_argoproj_workflow_v1alpha1_artifact_gc_spec_instance = IoArgoprojWorkflowV1alpha1ArtifactGCSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactGCSpec.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_gc_spec_dict = io_argoproj_workflow_v1alpha1_artifact_gc_spec_instance.to_dict() +# create an instance 
of IoArgoprojWorkflowV1alpha1ArtifactGCSpec from a dict +io_argoproj_workflow_v1alpha1_artifact_gc_spec_form_dict = io_argoproj_workflow_v1alpha1_artifact_gc_spec.from_dict(io_argoproj_workflow_v1alpha1_artifact_gc_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md index 05cedb1701e4..9b0c1c74549b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactGCStatus.md @@ -3,11 +3,28 @@ ArtifactGCStatus describes the result of the deletion ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**artifact_results_by_node** | [**{str: (IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus,)}**](IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md) | ArtifactResultsByNode maps Node name to result | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**artifact_results_by_node** | [**Dict[str, IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus]**](IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md) | ArtifactResultsByNode maps Node name to result | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_gc_status import IoArgoprojWorkflowV1alpha1ArtifactGCStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactGCStatus from a JSON string +io_argoproj_workflow_v1alpha1_artifact_gc_status_instance = IoArgoprojWorkflowV1alpha1ArtifactGCStatus.from_json(json) +# print the JSON string representation of the object 
+print(IoArgoprojWorkflowV1alpha1ArtifactGCStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_gc_status_dict = io_argoproj_workflow_v1alpha1_artifact_gc_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactGCStatus from a dict +io_argoproj_workflow_v1alpha1_artifact_gc_status_form_dict = io_argoproj_workflow_v1alpha1_artifact_gc_status.from_dict(io_argoproj_workflow_v1alpha1_artifact_gc_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md index e2e4949deb82..6b6d08195881 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactLocation.md @@ -3,6 +3,7 @@ ArtifactLocation describes a location for a single or multiple artifacts. It is used as single artifact in the context of inputs/outputs (e.g. outputs.artifacts.artname). It is also used to describe the location of multiple artifacts such as the archive location of a single workflow step, which the executor will use as a default location to store its files. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **archive_logs** | **bool** | ArchiveLogs indicates if the container logs should be archived | [optional] @@ -15,8 +16,24 @@ Name | Type | Description | Notes **oss** | [**IoArgoprojWorkflowV1alpha1OSSArtifact**](IoArgoprojWorkflowV1alpha1OSSArtifact.md) | | [optional] **raw** | [**IoArgoprojWorkflowV1alpha1RawArtifact**](IoArgoprojWorkflowV1alpha1RawArtifact.md) | | [optional] **s3** | [**IoArgoprojWorkflowV1alpha1S3Artifact**](IoArgoprojWorkflowV1alpha1S3Artifact.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_location import IoArgoprojWorkflowV1alpha1ArtifactLocation + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactLocation from a JSON string +io_argoproj_workflow_v1alpha1_artifact_location_instance = IoArgoprojWorkflowV1alpha1ArtifactLocation.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactLocation.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_location_dict = io_argoproj_workflow_v1alpha1_artifact_location_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactLocation from a dict +io_argoproj_workflow_v1alpha1_artifact_location_form_dict = io_argoproj_workflow_v1alpha1_artifact_location.from_dict(io_argoproj_workflow_v1alpha1_artifact_location_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md index 4217c51c7e00..6dc601c11234 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.md @@ -3,12 +3,29 @@ ArtifactNodeSpec specifies the Artifacts that need to be deleted for a given Node ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **archive_location** | [**IoArgoprojWorkflowV1alpha1ArtifactLocation**](IoArgoprojWorkflowV1alpha1ArtifactLocation.md) | | [optional] -**artifacts** | [**{str: (IoArgoprojWorkflowV1alpha1Artifact,)}**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts maps artifact name to Artifact description | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**artifacts** | [**Dict[str, IoArgoprojWorkflowV1alpha1Artifact]**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts maps artifact name to Artifact description | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_node_spec import IoArgoprojWorkflowV1alpha1ArtifactNodeSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactNodeSpec from a JSON string +io_argoproj_workflow_v1alpha1_artifact_node_spec_instance = IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactNodeSpec.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_node_spec_dict = io_argoproj_workflow_v1alpha1_artifact_node_spec_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactNodeSpec from a dict +io_argoproj_workflow_v1alpha1_artifact_node_spec_form_dict = 
io_argoproj_workflow_v1alpha1_artifact_node_spec.from_dict(io_argoproj_workflow_v1alpha1_artifact_node_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md index 25fe6ad26855..86095661d32a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactPaths.md @@ -3,16 +3,16 @@ ArtifactPaths expands a step from a collection of artifacts ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | name of the artifact. must be unique within a template's inputs/outputs. | **archive** | [**IoArgoprojWorkflowV1alpha1ArchiveStrategy**](IoArgoprojWorkflowV1alpha1ArchiveStrategy.md) | | [optional] **archive_logs** | **bool** | ArchiveLogs indicates if the container logs should be archived | [optional] **artifact_gc** | [**IoArgoprojWorkflowV1alpha1ArtifactGC**](IoArgoprojWorkflowV1alpha1ArtifactGC.md) | | [optional] **artifactory** | [**IoArgoprojWorkflowV1alpha1ArtifactoryArtifact**](IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md) | | [optional] **azure** | [**IoArgoprojWorkflowV1alpha1AzureArtifact**](IoArgoprojWorkflowV1alpha1AzureArtifact.md) | | [optional] **deleted** | **bool** | Has this been deleted? 
| [optional] -**_from** | **str** | From allows an artifact to reference an artifact from a previous step | [optional] +**var_from** | **str** | From allows an artifact to reference an artifact from a previous step | [optional] **from_expression** | **str** | FromExpression, if defined, is evaluated to specify the value for the artifact | [optional] **gcs** | [**IoArgoprojWorkflowV1alpha1GCSArtifact**](IoArgoprojWorkflowV1alpha1GCSArtifact.md) | | [optional] **git** | [**IoArgoprojWorkflowV1alpha1GitArtifact**](IoArgoprojWorkflowV1alpha1GitArtifact.md) | | [optional] @@ -20,6 +20,7 @@ Name | Type | Description | Notes **hdfs** | [**IoArgoprojWorkflowV1alpha1HDFSArtifact**](IoArgoprojWorkflowV1alpha1HDFSArtifact.md) | | [optional] **http** | [**IoArgoprojWorkflowV1alpha1HTTPArtifact**](IoArgoprojWorkflowV1alpha1HTTPArtifact.md) | | [optional] **mode** | **int** | mode bits to use on this file, must be a value between 0 and 0777 set when loading input artifacts. | [optional] +**name** | **str** | name of the artifact. must be unique within a template's inputs/outputs. 
| **optional** | **bool** | Make Artifacts optional, if Artifacts doesn't generate or exist | [optional] **oss** | [**IoArgoprojWorkflowV1alpha1OSSArtifact**](IoArgoprojWorkflowV1alpha1OSSArtifact.md) | | [optional] **path** | **str** | Path is the container path to the artifact | [optional] @@ -27,8 +28,24 @@ Name | Type | Description | Notes **recurse_mode** | **bool** | If mode is set, apply the permission recursively into the artifact if it is a folder | [optional] **s3** | [**IoArgoprojWorkflowV1alpha1S3Artifact**](IoArgoprojWorkflowV1alpha1S3Artifact.md) | | [optional] **sub_path** | **str** | SubPath allows an artifact to be sourced from a subpath within the specified source | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_paths import IoArgoprojWorkflowV1alpha1ArtifactPaths + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactPaths from a JSON string +io_argoproj_workflow_v1alpha1_artifact_paths_instance = IoArgoprojWorkflowV1alpha1ArtifactPaths.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactPaths.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_paths_dict = io_argoproj_workflow_v1alpha1_artifact_paths_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactPaths from a dict +io_argoproj_workflow_v1alpha1_artifact_paths_form_dict = io_argoproj_workflow_v1alpha1_artifact_paths.from_dict(io_argoproj_workflow_v1alpha1_artifact_paths_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md index c5db3c661979..7c8989ea3064 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepository.md @@ -3,6 +3,7 @@ ArtifactRepository represents an artifact repository in which a controller will store its artifacts ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **archive_logs** | **bool** | ArchiveLogs enables log archiving | [optional] @@ -12,8 +13,24 @@ Name | Type | Description | Notes **hdfs** | [**IoArgoprojWorkflowV1alpha1HDFSArtifactRepository**](IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.md) | | [optional] **oss** | [**IoArgoprojWorkflowV1alpha1OSSArtifactRepository**](IoArgoprojWorkflowV1alpha1OSSArtifactRepository.md) | | [optional] **s3** | [**IoArgoprojWorkflowV1alpha1S3ArtifactRepository**](IoArgoprojWorkflowV1alpha1S3ArtifactRepository.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepository from a JSON string +io_argoproj_workflow_v1alpha1_artifact_repository_instance = IoArgoprojWorkflowV1alpha1ArtifactRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactRepository.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_repository_dict = io_argoproj_workflow_v1alpha1_artifact_repository_instance.to_dict() +# create an instance of 
IoArgoprojWorkflowV1alpha1ArtifactRepository from a dict +io_argoproj_workflow_v1alpha1_artifact_repository_form_dict = io_argoproj_workflow_v1alpha1_artifact_repository.from_dict(io_argoproj_workflow_v1alpha1_artifact_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.md index 23453711d89b..543c7ad78050 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map** | **str** | The name of the config map. Defaults to \"artifact-repositories\". | [optional] **key** | **str** | The config map key. Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef from a JSON string +io_argoproj_workflow_v1alpha1_artifact_repository_ref_instance = IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_repository_ref_dict = io_argoproj_workflow_v1alpha1_artifact_repository_ref_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef from a dict +io_argoproj_workflow_v1alpha1_artifact_repository_ref_form_dict = io_argoproj_workflow_v1alpha1_artifact_repository_ref.from_dict(io_argoproj_workflow_v1alpha1_artifact_repository_ref_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.md index 66b94438ed5c..6c57a8f59879 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **artifact_repository** | [**IoArgoprojWorkflowV1alpha1ArtifactRepository**](IoArgoprojWorkflowV1alpha1ArtifactRepository.md) | 
| [optional] @@ -9,8 +10,24 @@ Name | Type | Description | Notes **default** | **bool** | If this ref represents the default artifact repository, rather than a config map. | [optional] **key** | **str** | The config map key. Defaults to the value of the \"workflows.argoproj.io/default-artifact-repository\" annotation. | [optional] **namespace** | **str** | The namespace of the config map. Defaults to the workflow's namespace, or the controller's namespace (if found). | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_repository_ref_status import IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus from a JSON string +io_argoproj_workflow_v1alpha1_artifact_repository_ref_status_instance = IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_repository_ref_status_dict = io_argoproj_workflow_v1alpha1_artifact_repository_ref_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus from a dict +io_argoproj_workflow_v1alpha1_artifact_repository_ref_status_form_dict = io_argoproj_workflow_v1alpha1_artifact_repository_ref_status.from_dict(io_argoproj_workflow_v1alpha1_artifact_repository_ref_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md index dad97f7d325f..2ab703ac576d 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResult.md @@ -3,13 +3,30 @@ ArtifactResult describes the result of attempting to delete a given Artifact ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | Name is the name of the Artifact | **error** | **str** | Error is an optional error message which should be set if Success==false | [optional] +**name** | **str** | Name is the name of the Artifact | **success** | **bool** | Success describes whether the deletion succeeded | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result import IoArgoprojWorkflowV1alpha1ArtifactResult + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactResult from a JSON string +io_argoproj_workflow_v1alpha1_artifact_result_instance = IoArgoprojWorkflowV1alpha1ArtifactResult.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactResult.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_result_dict = io_argoproj_workflow_v1alpha1_artifact_result_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactResult from a dict +io_argoproj_workflow_v1alpha1_artifact_result_form_dict = io_argoproj_workflow_v1alpha1_artifact_result.from_dict(io_argoproj_workflow_v1alpha1_artifact_result_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md index 8677a87b5933..06cc93416716 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.md @@ -3,11 +3,28 @@ ArtifactResultNodeStatus describes the result of the deletion on a given node ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**artifact_results** | [**{str: (IoArgoprojWorkflowV1alpha1ArtifactResult,)}**](IoArgoprojWorkflowV1alpha1ArtifactResult.md) | ArtifactResults maps Artifact name to result of the deletion | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**artifact_results** | [**Dict[str, IoArgoprojWorkflowV1alpha1ArtifactResult]**](IoArgoprojWorkflowV1alpha1ArtifactResult.md) | ArtifactResults maps Artifact name to result of the deletion | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifact_result_node_status import IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus from a JSON string +io_argoproj_workflow_v1alpha1_artifact_result_node_status_instance = IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifact_result_node_status_dict = io_argoproj_workflow_v1alpha1_artifact_result_node_status_instance.to_dict() +# create an instance of 
IoArgoprojWorkflowV1alpha1ArtifactResultNodeStatus from a dict +io_argoproj_workflow_v1alpha1_artifact_result_node_status_form_dict = io_argoproj_workflow_v1alpha1_artifact_result_node_status.from_dict(io_argoproj_workflow_v1alpha1_artifact_result_node_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md index 727631f47d24..f58f89f94d24 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.md @@ -3,13 +3,30 @@ ArtifactoryArtifact is the location of an artifactory artifact ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**url** | **str** | URL of the artifact | **password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**url** | **str** | URL of the artifact | **username_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact import IoArgoprojWorkflowV1alpha1ArtifactoryArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifact from a JSON string +io_argoproj_workflow_v1alpha1_artifactory_artifact_instance = IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactoryArtifact.to_json()) + +# convert the object into a dict 
+io_argoproj_workflow_v1alpha1_artifactory_artifact_dict = io_argoproj_workflow_v1alpha1_artifactory_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifact from a dict +io_argoproj_workflow_v1alpha1_artifactory_artifact_form_dict = io_argoproj_workflow_v1alpha1_artifactory_artifact.from_dict(io_argoproj_workflow_v1alpha1_artifactory_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.md index 557a9e12e0f0..07a2721b883b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.md @@ -3,14 +3,31 @@ ArtifactoryArtifactRepository defines the controller configuration for an artifactory artifact repository ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key_format** | **str** | KeyFormat defines the format of how to store keys and can reference workflow variables. | [optional] **password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **repo_url** | **str** | RepoURL is the url for artifactory repo. 
| [optional] **username_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_artifactory_artifact_repository import IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository from a JSON string +io_argoproj_workflow_v1alpha1_artifactory_artifact_repository_instance = IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_artifactory_artifact_repository_dict = io_argoproj_workflow_v1alpha1_artifactory_artifact_repository_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository from a dict +io_argoproj_workflow_v1alpha1_artifactory_artifact_repository_form_dict = io_argoproj_workflow_v1alpha1_artifactory_artifact_repository.from_dict(io_argoproj_workflow_v1alpha1_artifactory_artifact_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md index 68e7087d0aa3..26f7c123d9c2 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifact.md @@ -3,15 +3,32 @@ AzureArtifact is the location of a an Azure Storage artifact ## Properties + Name | Type | Description | Notes 
------------ | ------------- | ------------- | ------------- +**account_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **blob** | **str** | Blob is the blob name (i.e., path) in the container where the artifact resides | **container** | **str** | Container is the container where resources will be stored | **endpoint** | **str** | Endpoint is the service url associated with an account. It is most likely \"https://<ACCOUNT_NAME>.blob.core.windows.net\" | -**account_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact import IoArgoprojWorkflowV1alpha1AzureArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1AzureArtifact from a JSON string +io_argoproj_workflow_v1alpha1_azure_artifact_instance = IoArgoprojWorkflowV1alpha1AzureArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1AzureArtifact.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_azure_artifact_dict = io_argoproj_workflow_v1alpha1_azure_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1AzureArtifact from a dict +io_argoproj_workflow_v1alpha1_azure_artifact_form_dict = io_argoproj_workflow_v1alpha1_azure_artifact.from_dict(io_argoproj_workflow_v1alpha1_azure_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md index 2becb466ebb5..83afaa450b12 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1AzureArtifactRepository.md @@ -3,15 +3,32 @@ AzureArtifactRepository defines the controller configuration for an Azure Blob Storage artifact repository ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**container** | **str** | Container is the container where resources will be stored | -**endpoint** | **str** | Endpoint is the service url associated with an account. It is most likely \"https://<ACCOUNT_NAME>.blob.core.windows.net\" | **account_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **blob_name_format** | **str** | BlobNameFormat is defines the format of how to store blob names. Can reference workflow variables | [optional] +**container** | **str** | Container is the container where resources will be stored | +**endpoint** | **str** | Endpoint is the service url associated with an account. It is most likely \"https://<ACCOUNT_NAME>.blob.core.windows.net\" | **use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_azure_artifact_repository import IoArgoprojWorkflowV1alpha1AzureArtifactRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1AzureArtifactRepository from a JSON string +io_argoproj_workflow_v1alpha1_azure_artifact_repository_instance = IoArgoprojWorkflowV1alpha1AzureArtifactRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1AzureArtifactRepository.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_azure_artifact_repository_dict = io_argoproj_workflow_v1alpha1_azure_artifact_repository_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1AzureArtifactRepository from a dict +io_argoproj_workflow_v1alpha1_azure_artifact_repository_form_dict = io_argoproj_workflow_v1alpha1_azure_artifact_repository.from_dict(io_argoproj_workflow_v1alpha1_azure_artifact_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Backoff.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Backoff.md index 2e7bdeb78572..5e3f467f0334 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Backoff.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Backoff.md @@ -3,13 +3,30 @@ Backoff is a backoff strategy to use within retryStrategy ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **duration** | **str** | Duration is the amount to back off. Default unit is seconds, but could also be a duration (e.g. 
\"2m\", \"1h\") | [optional] **factor** | **str** | | [optional] **max_duration** | **str** | MaxDuration is the maximum amount of time allowed for a workflow in the backoff strategy | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_backoff import IoArgoprojWorkflowV1alpha1Backoff + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Backoff from a JSON string +io_argoproj_workflow_v1alpha1_backoff_instance = IoArgoprojWorkflowV1alpha1Backoff.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Backoff.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_backoff_dict = io_argoproj_workflow_v1alpha1_backoff_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Backoff from a dict +io_argoproj_workflow_v1alpha1_backoff_form_dict = io_argoproj_workflow_v1alpha1_backoff.from_dict(io_argoproj_workflow_v1alpha1_backoff_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md index 52326a531986..073b50c9f86a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1BasicAuth.md @@ -3,12 +3,29 @@ BasicAuth describes the secret selectors required for basic authentication ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username_secret** | 
[**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_basic_auth import IoArgoprojWorkflowV1alpha1BasicAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1BasicAuth from a JSON string +io_argoproj_workflow_v1alpha1_basic_auth_instance = IoArgoprojWorkflowV1alpha1BasicAuth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1BasicAuth.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_basic_auth_dict = io_argoproj_workflow_v1alpha1_basic_auth_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1BasicAuth from a dict +io_argoproj_workflow_v1alpha1_basic_auth_form_dict = io_argoproj_workflow_v1alpha1_basic_auth.from_dict(io_argoproj_workflow_v1alpha1_basic_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Cache.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Cache.md index 5871ad1e45b0..4b4cd38b6660 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Cache.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Cache.md @@ -3,11 +3,28 @@ Cache is the configuration for the type of cache to be used ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + 
+```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cache import IoArgoprojWorkflowV1alpha1Cache + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Cache from a JSON string +io_argoproj_workflow_v1alpha1_cache_instance = IoArgoprojWorkflowV1alpha1Cache.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Cache.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cache_dict = io_argoproj_workflow_v1alpha1_cache_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Cache from a dict +io_argoproj_workflow_v1alpha1_cache_form_dict = io_argoproj_workflow_v1alpha1_cache.from_dict(io_argoproj_workflow_v1alpha1_cache_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md index b2e389e51983..e188b2b220ff 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClientCertAuth.md @@ -3,12 +3,29 @@ ClientCertAuth holds necessary information for client authentication via certificates ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **client_cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **client_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_client_cert_auth import IoArgoprojWorkflowV1alpha1ClientCertAuth + +# 
TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ClientCertAuth from a JSON string +io_argoproj_workflow_v1alpha1_client_cert_auth_instance = IoArgoprojWorkflowV1alpha1ClientCertAuth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ClientCertAuth.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_client_cert_auth_dict = io_argoproj_workflow_v1alpha1_client_cert_auth_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ClientCertAuth from a dict +io_argoproj_workflow_v1alpha1_client_cert_auth_form_dict = io_argoproj_workflow_v1alpha1_client_cert_auth.from_dict(io_argoproj_workflow_v1alpha1_client_cert_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md index 5f7ef820c86a..199f6ba0b37e 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md @@ -3,14 +3,31 @@ ClusterWorkflowTemplate is the definition of a workflow template resource in cluster scope ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | -**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ObjectMeta**](ObjectMeta.md) | | +**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate from a JSON string +io_argoproj_workflow_v1alpha1_cluster_workflow_template_instance = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate from a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_form_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template.from_dict(io_argoproj_workflow_v1alpha1_cluster_workflow_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.md index 221cae041a3b..411a82d09f2b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] **template** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest from a JSON string +io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request_instance = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateCreateRequest from a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request_form_dict = 
io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request.from_dict(io_argoproj_workflow_v1alpha1_cluster_workflow_template_create_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.md index bf3c3fe2fdcf..cd3e1e3c790e 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] **template** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest from a JSON string +io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request_instance = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest.to_json()) + +# convert the object into a dict 
+io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateLintRequest from a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request_form_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request.from_dict(io_argoproj_workflow_v1alpha1_cluster_workflow_template_lint_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList.md index 356a7f8c3117..67e152a5c3a4 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList.md @@ -3,14 +3,31 @@ ClusterWorkflowTemplateList is list of ClusterWorkflowTemplate resources ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md) | | -**metadata** | [**ListMeta**](ListMeta.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] +**items** | [**List[IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate]**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md) | | **kind** | **str** | Kind is a string value representing the REST resource this object represents. 
Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ListMeta**](ListMeta.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_list import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList from a JSON string +io_argoproj_workflow_v1alpha1_cluster_workflow_template_list_instance = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_list_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_list_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateList from a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_list_form_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_list.from_dict(io_argoproj_workflow_v1alpha1_cluster_workflow_template_list_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.md index cf64cc12cc7d..61a009de702e 100644 --- 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | DEPRECATED: This field is ignored. | [optional] **template** | [**IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate**](IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request import IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest from a JSON string +io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request_instance = IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ClusterWorkflowTemplateUpdateRequest from a dict +io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request_form_dict = io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request.from_dict(io_argoproj_workflow_v1alpha1_cluster_workflow_template_update_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md index 5595664f1a1a..923c632ebca6 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CollectEventRequest.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_collect_event_request import IoArgoprojWorkflowV1alpha1CollectEventRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CollectEventRequest from a JSON string +io_argoproj_workflow_v1alpha1_collect_event_request_instance = IoArgoprojWorkflowV1alpha1CollectEventRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CollectEventRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_collect_event_request_dict = io_argoproj_workflow_v1alpha1_collect_event_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CollectEventRequest from a dict +io_argoproj_workflow_v1alpha1_collect_event_request_form_dict = io_argoproj_workflow_v1alpha1_collect_event_request.from_dict(io_argoproj_workflow_v1alpha1_collect_event_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Column.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Column.md index c54e15b0f8d6..ea13d462f0d7 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Column.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Column.md @@ -3,13 +3,30 @@ Column is a custom column that will be exposed in the Workflow List View. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | The key of the label or annotation, e.g., \"workflows.argoproj.io/completed\". | **name** | **str** | The name of this column, e.g., \"Workflow Completed\". | **type** | **str** | The type of this column, \"label\" or \"annotation\". | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_column import IoArgoprojWorkflowV1alpha1Column + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Column from a JSON string +io_argoproj_workflow_v1alpha1_column_instance = IoArgoprojWorkflowV1alpha1Column.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Column.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_column_dict = io_argoproj_workflow_v1alpha1_column_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Column from a dict +io_argoproj_workflow_v1alpha1_column_form_dict = io_argoproj_workflow_v1alpha1_column.from_dict(io_argoproj_workflow_v1alpha1_column_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Condition.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Condition.md index 
1f7b0e095470..04b8bf5e16ea 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Condition.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Condition.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **message** | **str** | Message is the condition message | [optional] **status** | **str** | Status is the status of the condition | [optional] **type** | **str** | Type is the type of condition | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_condition import IoArgoprojWorkflowV1alpha1Condition + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Condition from a JSON string +io_argoproj_workflow_v1alpha1_condition_instance = IoArgoprojWorkflowV1alpha1Condition.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Condition.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_condition_dict = io_argoproj_workflow_v1alpha1_condition_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Condition from a dict +io_argoproj_workflow_v1alpha1_condition_form_dict = io_argoproj_workflow_v1alpha1_condition.from_dict(io_argoproj_workflow_v1alpha1_condition_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md index 133c91613716..a7bfccec699b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerNode.md @@ -2,19 +2,20 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | -**args** | **[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**dependencies** | **[str]** | | [optional] -**env** | [**[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. 
| [optional] -**env_from** | [**[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] +**args** | **List[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **List[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**dependencies** | **List[str]** | | [optional] +**env** | [**List[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] +**env_from** | [**List[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] **image** | **str** | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] **image_pull_policy** | **str** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**Lifecycle**](Lifecycle.md) | | [optional] **liveness_probe** | [**Probe**](Probe.md) | | [optional] -**ports** | [**[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated. 
| [optional] +**name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | +**ports** | [**List[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated. | [optional] **readiness_probe** | [**Probe**](Probe.md) | | [optional] **resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] **security_context** | [**SecurityContext**](SecurityContext.md) | | [optional] @@ -24,11 +25,27 @@ Name | Type | Description | Notes **termination_message_path** | **str** | Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated. | [optional] **termination_message_policy** | **str** | Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated. | [optional] **tty** | **bool** | Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. 
Default is false. | [optional] -**volume_devices** | [**[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. | [optional] -**volume_mounts** | [**[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] +**volume_devices** | [**List[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. | [optional] +**volume_mounts** | [**List[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] **working_dir** | **str** | Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_node import IoArgoprojWorkflowV1alpha1ContainerNode + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ContainerNode from a JSON string +io_argoproj_workflow_v1alpha1_container_node_instance = IoArgoprojWorkflowV1alpha1ContainerNode.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ContainerNode.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_container_node_dict = io_argoproj_workflow_v1alpha1_container_node_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ContainerNode from a dict +io_argoproj_workflow_v1alpha1_container_node_form_dict = io_argoproj_workflow_v1alpha1_container_node.from_dict(io_argoproj_workflow_v1alpha1_container_node_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.md index 50e09bc46bd8..6bd01be2525b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**retries** | **str** | | **duration** | **str** | Duration is the time between each retry, examples values are \"300ms\", \"1s\" or \"5m\". Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\", \"m\", \"h\". | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**retries** | **str** | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_retry_strategy import IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy from a JSON string +io_argoproj_workflow_v1alpha1_container_set_retry_strategy_instance = IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_container_set_retry_strategy_dict = io_argoproj_workflow_v1alpha1_container_set_retry_strategy_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy from a dict +io_argoproj_workflow_v1alpha1_container_set_retry_strategy_form_dict = 
io_argoproj_workflow_v1alpha1_container_set_retry_strategy.from_dict(io_argoproj_workflow_v1alpha1_container_set_retry_strategy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetTemplate.md index b229e5933177..9f2d6cfa6d3c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContainerSetTemplate.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**containers** | [**[IoArgoprojWorkflowV1alpha1ContainerNode]**](IoArgoprojWorkflowV1alpha1ContainerNode.md) | | +**containers** | [**List[IoArgoprojWorkflowV1alpha1ContainerNode]**](IoArgoprojWorkflowV1alpha1ContainerNode.md) | | **retry_strategy** | [**IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy**](IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy.md) | | [optional] -**volume_mounts** | [**[VolumeMount]**](VolumeMount.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**volume_mounts** | [**List[VolumeMount]**](VolumeMount.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_container_set_template import IoArgoprojWorkflowV1alpha1ContainerSetTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ContainerSetTemplate from a JSON string +io_argoproj_workflow_v1alpha1_container_set_template_instance = IoArgoprojWorkflowV1alpha1ContainerSetTemplate.from_json(json) +# print the JSON string representation of the object 
+print(IoArgoprojWorkflowV1alpha1ContainerSetTemplate.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_container_set_template_dict = io_argoproj_workflow_v1alpha1_container_set_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ContainerSetTemplate from a dict +io_argoproj_workflow_v1alpha1_container_set_template_form_dict = io_argoproj_workflow_v1alpha1_container_set_template.from_dict(io_argoproj_workflow_v1alpha1_container_set_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContinueOn.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContinueOn.md index 42c2edaa5c2d..06f3c17dfa94 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContinueOn.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ContinueOn.md @@ -3,12 +3,29 @@ ContinueOn defines if a workflow should continue even if a task or step fails/errors. It can be specified if the workflow should continue when the pod errors, fails or both. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | **bool** | | [optional] **failed** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_continue_on import IoArgoprojWorkflowV1alpha1ContinueOn + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ContinueOn from a JSON string +io_argoproj_workflow_v1alpha1_continue_on_instance = IoArgoprojWorkflowV1alpha1ContinueOn.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ContinueOn.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_continue_on_dict = io_argoproj_workflow_v1alpha1_continue_on_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ContinueOn from a dict +io_argoproj_workflow_v1alpha1_continue_on_form_dict = io_argoproj_workflow_v1alpha1_continue_on.from_dict(io_argoproj_workflow_v1alpha1_continue_on_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Counter.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Counter.md index a60f1ed9eb97..ca4aa84c92fa 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Counter.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Counter.md @@ -3,11 +3,28 @@ Counter is a Counter prometheus metric ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **value** | **str** | Value is the value of the metric | -**any string name** | **bool, date, datetime, dict, float, int, list, str, 
none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_counter import IoArgoprojWorkflowV1alpha1Counter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Counter from a JSON string +io_argoproj_workflow_v1alpha1_counter_instance = IoArgoprojWorkflowV1alpha1Counter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Counter.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_counter_dict = io_argoproj_workflow_v1alpha1_counter_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Counter from a dict +io_argoproj_workflow_v1alpha1_counter_form_dict = io_argoproj_workflow_v1alpha1_counter.from_dict(io_argoproj_workflow_v1alpha1_counter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.md index afc6b98ce262..02bafb2d8e86 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] **cron_workflow** | [**IoArgoprojWorkflowV1alpha1CronWorkflow**](IoArgoprojWorkflowV1alpha1CronWorkflow.md) | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] 
+## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_cron_workflow_request import IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest from a JSON string +io_argoproj_workflow_v1alpha1_create_cron_workflow_request_instance = IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_create_cron_workflow_request_dict = io_argoproj_workflow_v1alpha1_create_cron_workflow_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CreateCronWorkflowRequest from a dict +io_argoproj_workflow_v1alpha1_create_cron_workflow_request_form_dict = io_argoproj_workflow_v1alpha1_create_cron_workflow_request.from_dict(io_argoproj_workflow_v1alpha1_create_cron_workflow_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.md index 3fb8b742a63e..2d127abaaa02 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.md @@ -3,11 +3,28 @@ CreateS3BucketOptions options used to determine automatic automatic bucket-creation process ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **object_locking** | **bool** | ObjectLocking Enable object locking | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name 
can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_create_s3_bucket_options import IoArgoprojWorkflowV1alpha1CreateS3BucketOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CreateS3BucketOptions from a JSON string +io_argoproj_workflow_v1alpha1_create_s3_bucket_options_instance = IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CreateS3BucketOptions.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_create_s3_bucket_options_dict = io_argoproj_workflow_v1alpha1_create_s3_bucket_options_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CreateS3BucketOptions from a dict +io_argoproj_workflow_v1alpha1_create_s3_bucket_options_form_dict = io_argoproj_workflow_v1alpha1_create_s3_bucket_options.from_dict(io_argoproj_workflow_v1alpha1_create_s3_bucket_options_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflow.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflow.md index 458805d60798..bbb86853c4b3 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflow.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflow.md @@ -3,15 +3,32 @@ CronWorkflow is the definition of a scheduled workflow resource ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | -**spec** | [**IoArgoprojWorkflowV1alpha1CronWorkflowSpec**](IoArgoprojWorkflowV1alpha1CronWorkflowSpec.md) | | **api_version** | **str** | APIVersion defines the versioned schema of 
this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] +**metadata** | [**ObjectMeta**](ObjectMeta.md) | | +**spec** | [**IoArgoprojWorkflowV1alpha1CronWorkflowSpec**](IoArgoprojWorkflowV1alpha1CronWorkflowSpec.md) | | **status** | [**IoArgoprojWorkflowV1alpha1CronWorkflowStatus**](IoArgoprojWorkflowV1alpha1CronWorkflowStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow import IoArgoprojWorkflowV1alpha1CronWorkflow + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflow from a JSON string +io_argoproj_workflow_v1alpha1_cron_workflow_instance = IoArgoprojWorkflowV1alpha1CronWorkflow.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CronWorkflow.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cron_workflow_dict = io_argoproj_workflow_v1alpha1_cron_workflow_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflow from a dict +io_argoproj_workflow_v1alpha1_cron_workflow_form_dict = io_argoproj_workflow_v1alpha1_cron_workflow.from_dict(io_argoproj_workflow_v1alpha1_cron_workflow_dict) +``` [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowList.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowList.md index 729e7bc049a6..44a3e9ac0e8e 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowList.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowList.md @@ -3,14 +3,31 @@ CronWorkflowList is list of CronWorkflow resources ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojWorkflowV1alpha1CronWorkflow]**](IoArgoprojWorkflowV1alpha1CronWorkflow.md) | | -**metadata** | [**ListMeta**](ListMeta.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] +**items** | [**List[IoArgoprojWorkflowV1alpha1CronWorkflow]**](IoArgoprojWorkflowV1alpha1CronWorkflow.md) | | **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ListMeta**](ListMeta.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_list import IoArgoprojWorkflowV1alpha1CronWorkflowList + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowList from a JSON string +io_argoproj_workflow_v1alpha1_cron_workflow_list_instance = IoArgoprojWorkflowV1alpha1CronWorkflowList.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CronWorkflowList.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cron_workflow_list_dict = io_argoproj_workflow_v1alpha1_cron_workflow_list_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowList from a dict +io_argoproj_workflow_v1alpha1_cron_workflow_list_form_dict = io_argoproj_workflow_v1alpha1_cron_workflow_list.from_dict(io_argoproj_workflow_v1alpha1_cron_workflow_list_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.md index 113234be4300..d6cc7e6b7172 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] 
**namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_resume_request import IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest from a JSON string +io_argoproj_workflow_v1alpha1_cron_workflow_resume_request_instance = IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cron_workflow_resume_request_dict = io_argoproj_workflow_v1alpha1_cron_workflow_resume_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowResumeRequest from a dict +io_argoproj_workflow_v1alpha1_cron_workflow_resume_request_form_dict = io_argoproj_workflow_v1alpha1_cron_workflow_resume_request.from_dict(io_argoproj_workflow_v1alpha1_cron_workflow_resume_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSpec.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSpec.md index 9dae8f1a564c..74ee5737f7ff 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSpec.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSpec.md @@ -3,21 +3,38 @@ CronWorkflowSpec is the specification of a CronWorkflow ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**schedule** | **str** | 
Schedule is a schedule to run the Workflow in Cron format | -**workflow_spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | **concurrency_policy** | **str** | ConcurrencyPolicy is the K8s-style concurrency policy that will be used | [optional] **failed_jobs_history_limit** | **int** | FailedJobsHistoryLimit is the number of failed jobs to be kept at a time | [optional] -**schedules** | **[str]** | Schedules is a list of schedules to run the Workflow in Cron format | [optional] +**schedule** | **str** | Schedule is a schedule to run the Workflow in Cron format | +**schedules** | **List[str]** | Schedules is a list of schedules to run the Workflow in Cron format | [optional] **starting_deadline_seconds** | **int** | StartingDeadlineSeconds is the K8s-style deadline that will limit the time a CronWorkflow will be run after its original scheduled time if it is missed. | [optional] **stop_strategy** | [**IoArgoprojWorkflowV1alpha1StopStrategy**](IoArgoprojWorkflowV1alpha1StopStrategy.md) | | [optional] **successful_jobs_history_limit** | **int** | SuccessfulJobsHistoryLimit is the number of successful jobs to be kept at a time | [optional] **suspend** | **bool** | Suspend is a flag that will stop new CronWorkflows from running if set to true | [optional] **timezone** | **str** | Timezone is the timezone against which the cron schedule will be calculated, e.g. \"Asia/Tokyo\". Default is machine's local time. 
| [optional] **workflow_metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**workflow_spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_spec import IoArgoprojWorkflowV1alpha1CronWorkflowSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSpec from a JSON string +io_argoproj_workflow_v1alpha1_cron_workflow_spec_instance = IoArgoprojWorkflowV1alpha1CronWorkflowSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CronWorkflowSpec.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cron_workflow_spec_dict = io_argoproj_workflow_v1alpha1_cron_workflow_spec_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSpec from a dict +io_argoproj_workflow_v1alpha1_cron_workflow_spec_form_dict = io_argoproj_workflow_v1alpha1_cron_workflow_spec.from_dict(io_argoproj_workflow_v1alpha1_cron_workflow_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowStatus.md index 776ce033cb50..3b2e21561d63 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowStatus.md @@ -3,16 +3,33 @@ CronWorkflowStatus is the status of a CronWorkflow ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | 
------------- -**active** | [**[ObjectReference]**](ObjectReference.md) | Active is a list of active workflows stemming from this CronWorkflow | -**conditions** | [**[IoArgoprojWorkflowV1alpha1Condition]**](IoArgoprojWorkflowV1alpha1Condition.md) | Conditions is a list of conditions the CronWorkflow may have | +**active** | [**List[ObjectReference]**](ObjectReference.md) | Active is a list of active workflows stemming from this CronWorkflow | +**conditions** | [**List[IoArgoprojWorkflowV1alpha1Condition]**](IoArgoprojWorkflowV1alpha1Condition.md) | Conditions is a list of conditions the CronWorkflow may have | **failed** | **int** | Failed is a counter of how many times a child workflow terminated in failed or errored state | **last_scheduled_time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | **phase** | **str** | Phase defines the cron workflow phase. 
It is changed to Stopped when the stopping condition is achieved which stops new CronWorkflows from running | **succeeded** | **int** | Succeeded is a counter of how many times the child workflows had success | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_status import IoArgoprojWorkflowV1alpha1CronWorkflowStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowStatus from a JSON string +io_argoproj_workflow_v1alpha1_cron_workflow_status_instance = IoArgoprojWorkflowV1alpha1CronWorkflowStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CronWorkflowStatus.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cron_workflow_status_dict = io_argoproj_workflow_v1alpha1_cron_workflow_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowStatus from a dict +io_argoproj_workflow_v1alpha1_cron_workflow_status_form_dict = io_argoproj_workflow_v1alpha1_cron_workflow_status.from_dict(io_argoproj_workflow_v1alpha1_cron_workflow_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.md index 507199f62a18..cae360eb51c0 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- 
| ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request import IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest from a JSON string +io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request_instance = IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request_dict = io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1CronWorkflowSuspendRequest from a dict +io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request_form_dict = io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request.from_dict(io_argoproj_workflow_v1alpha1_cron_workflow_suspend_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTask.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTask.md index 4f029b590e5c..c853bc03c6b4 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTask.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTask.md @@ -3,24 +3,41 @@ DAGTask represents a node in the graph during DAG execution ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | 
------------- -**name** | **str** | Name is the name of the target | **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] **continue_on** | [**IoArgoprojWorkflowV1alpha1ContinueOn**](IoArgoprojWorkflowV1alpha1ContinueOn.md) | | [optional] -**dependencies** | **[str]** | Dependencies are name of other targets which this depends on | [optional] +**dependencies** | **List[str]** | Dependencies are name of other targets which this depends on | [optional] **depends** | **str** | Depends are name of other targets which this depends on | [optional] -**hooks** | [**{str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}**](IoArgoprojWorkflowV1alpha1LifecycleHook.md) | Hooks hold the lifecycle hook which is invoked at lifecycle of task, irrespective of the success, failure, or error status of the primary task | [optional] +**hooks** | [**Dict[str, IoArgoprojWorkflowV1alpha1LifecycleHook]**](IoArgoprojWorkflowV1alpha1LifecycleHook.md) | Hooks hold the lifecycle hook which is invoked at lifecycle of task, irrespective of the success, failure, or error status of the primary task | [optional] **inline** | [**IoArgoprojWorkflowV1alpha1Template**](IoArgoprojWorkflowV1alpha1Template.md) | | [optional] +**name** | **str** | Name is the name of the target | **on_exit** | **str** | OnExit is a template reference which is invoked at the end of the template, irrespective of the success, failure, or error of the primary template. DEPRECATED: Use Hooks[exit].Template instead. 
| [optional] **template** | **str** | Name of template to execute | [optional] **template_ref** | [**IoArgoprojWorkflowV1alpha1TemplateRef**](IoArgoprojWorkflowV1alpha1TemplateRef.md) | | [optional] **when** | **str** | When is an expression in which the task should conditionally execute | [optional] -**with_items** | **[dict]** | WithItems expands a task into multiple parallel tasks from the items in the list | [optional] +**with_items** | **List[object]** | WithItems expands a task into multiple parallel tasks from the items in the list | [optional] **with_param** | **str** | WithParam expands a task into multiple parallel tasks from the value in the parameter, which is expected to be a JSON list. | [optional] **with_sequence** | [**IoArgoprojWorkflowV1alpha1Sequence**](IoArgoprojWorkflowV1alpha1Sequence.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_task import IoArgoprojWorkflowV1alpha1DAGTask + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1DAGTask from a JSON string +io_argoproj_workflow_v1alpha1_dag_task_instance = IoArgoprojWorkflowV1alpha1DAGTask.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1DAGTask.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_dag_task_dict = io_argoproj_workflow_v1alpha1_dag_task_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1DAGTask from a dict +io_argoproj_workflow_v1alpha1_dag_task_form_dict = io_argoproj_workflow_v1alpha1_dag_task.from_dict(io_argoproj_workflow_v1alpha1_dag_task_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTemplate.md index af46c7d3aec8..ada31fe178c6 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DAGTemplate.md @@ -3,13 +3,30 @@ DAGTemplate is a template subtype for directed acyclic graph templates ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**tasks** | [**[IoArgoprojWorkflowV1alpha1DAGTask]**](IoArgoprojWorkflowV1alpha1DAGTask.md) | Tasks are a list of DAG tasks | **fail_fast** | **bool** | This flag is for DAG logic. The DAG logic has a built-in \"fail fast\" feature to stop scheduling new steps, as soon as it detects that one of the DAG nodes is failed. Then it waits until all DAG nodes are completed before failing the DAG itself. The FailFast flag default is true, if set to false, it will allow a DAG to run all branches of the DAG to completion (either success or failure), regardless of the failed outcomes of branches in the DAG. 
More info and example about this feature at https://github.com/argoproj/argo-workflows/issues/1442 | [optional] **target** | **str** | Target are one or more names of targets to execute in a DAG | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**tasks** | [**List[IoArgoprojWorkflowV1alpha1DAGTask]**](IoArgoprojWorkflowV1alpha1DAGTask.md) | Tasks are a list of DAG tasks | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_dag_template import IoArgoprojWorkflowV1alpha1DAGTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1DAGTemplate from a JSON string +io_argoproj_workflow_v1alpha1_dag_template_instance = IoArgoprojWorkflowV1alpha1DAGTemplate.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1DAGTemplate.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_dag_template_dict = io_argoproj_workflow_v1alpha1_dag_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1DAGTemplate from a dict +io_argoproj_workflow_v1alpha1_dag_template_form_dict = io_argoproj_workflow_v1alpha1_dag_template.from_dict(io_argoproj_workflow_v1alpha1_dag_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Data.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Data.md index a39e48307445..1b7945286eaf 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Data.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Data.md @@ -3,12 +3,29 @@ Data is a data template ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | 
------------- **source** | [**IoArgoprojWorkflowV1alpha1DataSource**](IoArgoprojWorkflowV1alpha1DataSource.md) | | -**transformation** | [**[IoArgoprojWorkflowV1alpha1TransformationStep]**](IoArgoprojWorkflowV1alpha1TransformationStep.md) | Transformation applies a set of transformations | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**transformation** | [**List[IoArgoprojWorkflowV1alpha1TransformationStep]**](IoArgoprojWorkflowV1alpha1TransformationStep.md) | Transformation applies a set of transformations | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data import IoArgoprojWorkflowV1alpha1Data + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Data from a JSON string +io_argoproj_workflow_v1alpha1_data_instance = IoArgoprojWorkflowV1alpha1Data.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Data.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_data_dict = io_argoproj_workflow_v1alpha1_data_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Data from a dict +io_argoproj_workflow_v1alpha1_data_form_dict = io_argoproj_workflow_v1alpha1_data.from_dict(io_argoproj_workflow_v1alpha1_data_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DataSource.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DataSource.md index 93509c97733a..d5e32ffc6016 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DataSource.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1DataSource.md @@ -3,11 +3,28 @@ DataSource sources external data into a data template ## 
Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **artifact_paths** | [**IoArgoprojWorkflowV1alpha1ArtifactPaths**](IoArgoprojWorkflowV1alpha1ArtifactPaths.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_data_source import IoArgoprojWorkflowV1alpha1DataSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1DataSource from a JSON string +io_argoproj_workflow_v1alpha1_data_source_instance = IoArgoprojWorkflowV1alpha1DataSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1DataSource.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_data_source_dict = io_argoproj_workflow_v1alpha1_data_source_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1DataSource from a dict +io_argoproj_workflow_v1alpha1_data_source_form_dict = io_argoproj_workflow_v1alpha1_data_source.from_dict(io_argoproj_workflow_v1alpha1_data_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md index 8fe08221b8d7..58b2386ad5b5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **selector** | **str** | Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. 
E.g. `payload.message == \"test\"` | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_event import IoArgoprojWorkflowV1alpha1Event + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Event from a JSON string +io_argoproj_workflow_v1alpha1_event_instance = IoArgoprojWorkflowV1alpha1Event.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Event.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_event_dict = io_argoproj_workflow_v1alpha1_event_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Event from a dict +io_argoproj_workflow_v1alpha1_event_form_dict = io_argoproj_workflow_v1alpha1_event.from_dict(io_argoproj_workflow_v1alpha1_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ExecutorConfig.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ExecutorConfig.md index f6ec7f6e3606..d2fcb8b317fa 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ExecutorConfig.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ExecutorConfig.md @@ -3,11 +3,28 @@ ExecutorConfig holds configurations of an executor container. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **service_account_name** | **str** | ServiceAccountName specifies the service account name of the executor container. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_executor_config import IoArgoprojWorkflowV1alpha1ExecutorConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ExecutorConfig from a JSON string +io_argoproj_workflow_v1alpha1_executor_config_instance = IoArgoprojWorkflowV1alpha1ExecutorConfig.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ExecutorConfig.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_executor_config_dict = io_argoproj_workflow_v1alpha1_executor_config_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ExecutorConfig from a dict +io_argoproj_workflow_v1alpha1_executor_config_form_dict = io_argoproj_workflow_v1alpha1_executor_config.from_dict(io_argoproj_workflow_v1alpha1_executor_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifact.md index dcb76be9ae03..7292c8058c4b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifact.md @@ -3,13 +3,30 @@ GCSArtifact is the location of a GCS artifact ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**key** | **str** | Key is the path in the bucket where the artifact resides | **bucket** | **str** | Bucket is the name of the bucket | [optional] +**key** | **str** | Key is the path in the bucket where the artifact resides | 
**service_account_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact import IoArgoprojWorkflowV1alpha1GCSArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1GCSArtifact from a JSON string +io_argoproj_workflow_v1alpha1_gcs_artifact_instance = IoArgoprojWorkflowV1alpha1GCSArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1GCSArtifact.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_gcs_artifact_dict = io_argoproj_workflow_v1alpha1_gcs_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1GCSArtifact from a dict +io_argoproj_workflow_v1alpha1_gcs_artifact_form_dict = io_argoproj_workflow_v1alpha1_gcs_artifact.from_dict(io_argoproj_workflow_v1alpha1_gcs_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifactRepository.md index aea526000f96..cb82422cb3b8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifactRepository.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GCSArtifactRepository.md @@ -3,13 +3,30 @@ GCSArtifactRepository defines the controller configuration for a GCS artifact repository ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **bucket** | **str** | Bucket is the name of the bucket | [optional] **key_format** | **str** | KeyFormat 
defines the format of how to store keys and can reference workflow variables. | [optional] **service_account_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gcs_artifact_repository import IoArgoprojWorkflowV1alpha1GCSArtifactRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1GCSArtifactRepository from a JSON string +io_argoproj_workflow_v1alpha1_gcs_artifact_repository_instance = IoArgoprojWorkflowV1alpha1GCSArtifactRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1GCSArtifactRepository.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_gcs_artifact_repository_dict = io_argoproj_workflow_v1alpha1_gcs_artifact_repository_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1GCSArtifactRepository from a dict +io_argoproj_workflow_v1alpha1_gcs_artifact_repository_form_dict = io_argoproj_workflow_v1alpha1_gcs_artifact_repository.from_dict(io_argoproj_workflow_v1alpha1_gcs_artifact_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Gauge.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Gauge.md index baf27afa4648..89ee06f1e608 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Gauge.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Gauge.md @@ -3,13 +3,30 @@ Gauge is a Gauge prometheus metric ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- 
+**operation** | **str** | Operation defines the operation to apply with value and the metrics' current value | [optional] **realtime** | **bool** | Realtime emits this metric in real time if applicable | **value** | **str** | Value is the value to be used in the operation with the metric's current value. If no operation is set, value is the value of the metric | -**operation** | **str** | Operation defines the operation to apply with value and the metrics' current value | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_gauge import IoArgoprojWorkflowV1alpha1Gauge + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Gauge from a JSON string +io_argoproj_workflow_v1alpha1_gauge_instance = IoArgoprojWorkflowV1alpha1Gauge.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Gauge.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_gauge_dict = io_argoproj_workflow_v1alpha1_gauge_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Gauge from a dict +io_argoproj_workflow_v1alpha1_gauge_form_dict = io_argoproj_workflow_v1alpha1_gauge.from_dict(io_argoproj_workflow_v1alpha1_gauge_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md index 7a679cb492ca..94fc1d63b20b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GetUserInfoResponse.md @@ -2,18 +2,35 @@ 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **email** | **str** | | [optional] **email_verified** | **bool** | | [optional] -**groups** | **[str]** | | [optional] +**groups** | **List[str]** | | [optional] **issuer** | **str** | | [optional] **name** | **str** | | [optional] **service_account_name** | **str** | | [optional] **service_account_namespace** | **str** | | [optional] **subject** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_get_user_info_response import IoArgoprojWorkflowV1alpha1GetUserInfoResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1GetUserInfoResponse from a JSON string +io_argoproj_workflow_v1alpha1_get_user_info_response_instance = IoArgoprojWorkflowV1alpha1GetUserInfoResponse.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1GetUserInfoResponse.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_get_user_info_response_dict = io_argoproj_workflow_v1alpha1_get_user_info_response_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1GetUserInfoResponse from a dict +io_argoproj_workflow_v1alpha1_get_user_info_response_form_dict = io_argoproj_workflow_v1alpha1_get_user_info_response.from_dict(io_argoproj_workflow_v1alpha1_get_user_info_response_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md index b2c81b02c31a..c0746b6e305a 100644 --- 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1GitArtifact.md @@ -3,21 +3,38 @@ GitArtifact is the location of an git artifact ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**repo** | **str** | Repo is the git repository | **branch** | **str** | Branch is the branch to fetch when `SingleBranch` is enabled | [optional] **depth** | **int** | Depth specifies clones/fetches should be shallow and include the given number of commits from the branch tip | [optional] **disable_submodules** | **bool** | DisableSubmodules disables submodules during git clone | [optional] -**fetch** | **[str]** | Fetch specifies a number of refs that should be fetched before checkout | [optional] +**fetch** | **List[str]** | Fetch specifies a number of refs that should be fetched before checkout | [optional] **insecure_ignore_host_key** | **bool** | InsecureIgnoreHostKey disables SSH strict host key checking during git clone | [optional] **password_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**repo** | **str** | Repo is the git repository | **revision** | **str** | Revision is the git commit, tag, branch to checkout | [optional] **single_branch** | **bool** | SingleBranch enables single branch clone, using the `branch` parameter | [optional] **ssh_private_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_git_artifact import IoArgoprojWorkflowV1alpha1GitArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of 
IoArgoprojWorkflowV1alpha1GitArtifact from a JSON string +io_argoproj_workflow_v1alpha1_git_artifact_instance = IoArgoprojWorkflowV1alpha1GitArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1GitArtifact.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_git_artifact_dict = io_argoproj_workflow_v1alpha1_git_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1GitArtifact from a dict +io_argoproj_workflow_v1alpha1_git_artifact_form_dict = io_argoproj_workflow_v1alpha1_git_artifact.from_dict(io_argoproj_workflow_v1alpha1_git_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifact.md index dd9d437160bb..f0a4cf36bed2 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifact.md @@ -3,10 +3,10 @@ HDFSArtifact is the location of an HDFS artifact ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**path** | **str** | Path is a file path in HDFS | -**addresses** | **[str]** | Addresses is accessible addresses of HDFS name nodes | [optional] +**addresses** | **List[str]** | Addresses is accessible addresses of HDFS name nodes | [optional] **force** | **bool** | Force copies a file forcibly even if it exists | [optional] **hdfs_user** | **str** | HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used. 
| [optional] **krb_c_cache_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -15,8 +15,25 @@ Name | Type | Description | Notes **krb_realm** | **str** | KrbRealm is the Kerberos realm used with Kerberos keytab It must be set if keytab is used. | [optional] **krb_service_principal_name** | **str** | KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used. | [optional] **krb_username** | **str** | KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**path** | **str** | Path is a file path in HDFS | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact import IoArgoprojWorkflowV1alpha1HDFSArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifact from a JSON string +io_argoproj_workflow_v1alpha1_hdfs_artifact_instance = IoArgoprojWorkflowV1alpha1HDFSArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HDFSArtifact.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_hdfs_artifact_dict = io_argoproj_workflow_v1alpha1_hdfs_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifact from a dict +io_argoproj_workflow_v1alpha1_hdfs_artifact_form_dict = io_argoproj_workflow_v1alpha1_hdfs_artifact.from_dict(io_argoproj_workflow_v1alpha1_hdfs_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.md index ac59975238f0..a0c251cafe7c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.md @@ -3,9 +3,10 @@ HDFSArtifactRepository defines the controller configuration for an HDFS artifact repository ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**addresses** | **[str]** | Addresses is accessible addresses of HDFS name nodes | [optional] +**addresses** | **List[str]** | Addresses is accessible addresses of HDFS name nodes | [optional] **force** | **bool** | Force copies a file forcibly even if it exists | [optional] **hdfs_user** | **str** | HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used. | [optional] **krb_c_cache_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -15,8 +16,24 @@ Name | Type | Description | Notes **krb_service_principal_name** | **str** | KrbServicePrincipalName is the principal name of Kerberos service It must be set if either ccache or keytab is used. | [optional] **krb_username** | **str** | KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used. | [optional] **path_format** | **str** | PathFormat is defines the format of path to store a file. 
Can reference workflow variables | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_hdfs_artifact_repository import IoArgoprojWorkflowV1alpha1HDFSArtifactRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifactRepository from a JSON string +io_argoproj_workflow_v1alpha1_hdfs_artifact_repository_instance = IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HDFSArtifactRepository.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_hdfs_artifact_repository_dict = io_argoproj_workflow_v1alpha1_hdfs_artifact_repository_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1HDFSArtifactRepository from a dict +io_argoproj_workflow_v1alpha1_hdfs_artifact_repository_form_dict = io_argoproj_workflow_v1alpha1_hdfs_artifact_repository.from_dict(io_argoproj_workflow_v1alpha1_hdfs_artifact_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md index da15caee5242..77871fc21848 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTP.md @@ -2,18 +2,35 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**url** | **str** | URL of the HTTP Request | **body** | **str** | Body is content of the HTTP Request | [optional] **body_from** | 
[**IoArgoprojWorkflowV1alpha1HTTPBodySource**](IoArgoprojWorkflowV1alpha1HTTPBodySource.md) | | [optional] -**headers** | [**[IoArgoprojWorkflowV1alpha1HTTPHeader]**](IoArgoprojWorkflowV1alpha1HTTPHeader.md) | Headers are an optional list of headers to send with HTTP requests | [optional] +**headers** | [**List[IoArgoprojWorkflowV1alpha1HTTPHeader]**](IoArgoprojWorkflowV1alpha1HTTPHeader.md) | Headers are an optional list of headers to send with HTTP requests | [optional] **insecure_skip_verify** | **bool** | InsecureSkipVerify is a bool when if set to true will skip TLS verification for the HTTP client | [optional] **method** | **str** | Method is HTTP methods for HTTP Request | [optional] **success_condition** | **str** | SuccessCondition is an expression if evaluated to true is considered successful | [optional] **timeout_seconds** | **int** | TimeoutSeconds is request timeout for HTTP Request. Default is 30 seconds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**url** | **str** | URL of the HTTP Request | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http import IoArgoprojWorkflowV1alpha1HTTP + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HTTP from a JSON string +io_argoproj_workflow_v1alpha1_http_instance = IoArgoprojWorkflowV1alpha1HTTP.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HTTP.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_http_dict = io_argoproj_workflow_v1alpha1_http_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1HTTP from a dict +io_argoproj_workflow_v1alpha1_http_form_dict = io_argoproj_workflow_v1alpha1_http.from_dict(io_argoproj_workflow_v1alpha1_http_dict) +``` [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md index a9f98eccaadc..1f1228a79982 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPArtifact.md @@ -3,13 +3,30 @@ HTTPArtifact allows a file served on HTTP to be placed as an input artifact in a container ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**url** | **str** | URL of the artifact | **auth** | [**IoArgoprojWorkflowV1alpha1HTTPAuth**](IoArgoprojWorkflowV1alpha1HTTPAuth.md) | | [optional] -**headers** | [**[IoArgoprojWorkflowV1alpha1Header]**](IoArgoprojWorkflowV1alpha1Header.md) | Headers are an optional list of headers to send with HTTP requests for artifacts | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**headers** | [**List[IoArgoprojWorkflowV1alpha1Header]**](IoArgoprojWorkflowV1alpha1Header.md) | Headers are an optional list of headers to send with HTTP requests for artifacts | [optional] +**url** | **str** | URL of the artifact | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_artifact import IoArgoprojWorkflowV1alpha1HTTPArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HTTPArtifact from a JSON string +io_argoproj_workflow_v1alpha1_http_artifact_instance = IoArgoprojWorkflowV1alpha1HTTPArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HTTPArtifact.to_json()) +# convert the object into a dict 
+io_argoproj_workflow_v1alpha1_http_artifact_dict = io_argoproj_workflow_v1alpha1_http_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1HTTPArtifact from a dict +io_argoproj_workflow_v1alpha1_http_artifact_form_dict = io_argoproj_workflow_v1alpha1_http_artifact.from_dict(io_argoproj_workflow_v1alpha1_http_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md index d6132bfbd9b4..5d0c1500738f 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPAuth.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **basic_auth** | [**IoArgoprojWorkflowV1alpha1BasicAuth**](IoArgoprojWorkflowV1alpha1BasicAuth.md) | | [optional] **client_cert** | [**IoArgoprojWorkflowV1alpha1ClientCertAuth**](IoArgoprojWorkflowV1alpha1ClientCertAuth.md) | | [optional] **oauth2** | [**IoArgoprojWorkflowV1alpha1OAuth2Auth**](IoArgoprojWorkflowV1alpha1OAuth2Auth.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_auth import IoArgoprojWorkflowV1alpha1HTTPAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HTTPAuth from a JSON string +io_argoproj_workflow_v1alpha1_http_auth_instance = IoArgoprojWorkflowV1alpha1HTTPAuth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HTTPAuth.to_json()) + +# convert the object into a dict 
+io_argoproj_workflow_v1alpha1_http_auth_dict = io_argoproj_workflow_v1alpha1_http_auth_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1HTTPAuth from a dict +io_argoproj_workflow_v1alpha1_http_auth_form_dict = io_argoproj_workflow_v1alpha1_http_auth.from_dict(io_argoproj_workflow_v1alpha1_http_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md index 39f71bfa00ef..7a7ae4d2adf0 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPBodySource.md @@ -3,11 +3,28 @@ HTTPBodySource contains the source of the HTTP body. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**bytes** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**bytes** | **bytearray** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_body_source import IoArgoprojWorkflowV1alpha1HTTPBodySource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HTTPBodySource from a JSON string +io_argoproj_workflow_v1alpha1_http_body_source_instance = IoArgoprojWorkflowV1alpha1HTTPBodySource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HTTPBodySource.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_http_body_source_dict = io_argoproj_workflow_v1alpha1_http_body_source_instance.to_dict() +# create an instance of 
IoArgoprojWorkflowV1alpha1HTTPBodySource from a dict +io_argoproj_workflow_v1alpha1_http_body_source_form_dict = io_argoproj_workflow_v1alpha1_http_body_source.from_dict(io_argoproj_workflow_v1alpha1_http_body_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeader.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeader.md index acdec0f6162d..55f45a78748c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeader.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeader.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | **value** | **str** | | [optional] **value_from** | [**IoArgoprojWorkflowV1alpha1HTTPHeaderSource**](IoArgoprojWorkflowV1alpha1HTTPHeaderSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header import IoArgoprojWorkflowV1alpha1HTTPHeader + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HTTPHeader from a JSON string +io_argoproj_workflow_v1alpha1_http_header_instance = IoArgoprojWorkflowV1alpha1HTTPHeader.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HTTPHeader.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_http_header_dict = io_argoproj_workflow_v1alpha1_http_header_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1HTTPHeader from a dict +io_argoproj_workflow_v1alpha1_http_header_form_dict = 
io_argoproj_workflow_v1alpha1_http_header.from_dict(io_argoproj_workflow_v1alpha1_http_header_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeaderSource.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeaderSource.md index e8916a8ed5ae..8d40fb260e76 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeaderSource.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1HTTPHeaderSource.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **secret_key_ref** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_http_header_source import IoArgoprojWorkflowV1alpha1HTTPHeaderSource + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1HTTPHeaderSource from a JSON string +io_argoproj_workflow_v1alpha1_http_header_source_instance = IoArgoprojWorkflowV1alpha1HTTPHeaderSource.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1HTTPHeaderSource.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_http_header_source_dict = io_argoproj_workflow_v1alpha1_http_header_source_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1HTTPHeaderSource from a dict +io_argoproj_workflow_v1alpha1_http_header_source_form_dict = io_argoproj_workflow_v1alpha1_http_header_source.from_dict(io_argoproj_workflow_v1alpha1_http_header_source_dict) +``` [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Header.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Header.md index ea670f0be7e2..ecdfdc6fef09 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Header.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Header.md @@ -3,12 +3,29 @@ Header indicate a key-value request header to be used when fetching artifacts over HTTP ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name is the header name | **value** | **str** | Value is the literal value to use for the header | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_header import IoArgoprojWorkflowV1alpha1Header + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Header from a JSON string +io_argoproj_workflow_v1alpha1_header_instance = IoArgoprojWorkflowV1alpha1Header.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Header.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_header_dict = io_argoproj_workflow_v1alpha1_header_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Header from a dict +io_argoproj_workflow_v1alpha1_header_form_dict = io_argoproj_workflow_v1alpha1_header.from_dict(io_argoproj_workflow_v1alpha1_header_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Histogram.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Histogram.md index d74c6f796b7d..8e19ae23fb31 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Histogram.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Histogram.md @@ -3,12 +3,29 @@ Histogram is a Histogram prometheus metric ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**buckets** | **[float]** | Buckets is a list of bucket divisors for the histogram | +**buckets** | **List[float]** | Buckets is a list of bucket divisors for the histogram | **value** | **str** | Value is the value of the metric | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_histogram import IoArgoprojWorkflowV1alpha1Histogram + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Histogram from a JSON string +io_argoproj_workflow_v1alpha1_histogram_instance = IoArgoprojWorkflowV1alpha1Histogram.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Histogram.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_histogram_dict = io_argoproj_workflow_v1alpha1_histogram_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Histogram from a dict +io_argoproj_workflow_v1alpha1_histogram_form_dict = io_argoproj_workflow_v1alpha1_histogram.from_dict(io_argoproj_workflow_v1alpha1_histogram_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1InfoResponse.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1InfoResponse.md index 114be4fafab8..dcdef4c75687 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1InfoResponse.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1InfoResponse.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**columns** | [**[IoArgoprojWorkflowV1alpha1Column]**](IoArgoprojWorkflowV1alpha1Column.md) | | [optional] -**links** | [**[IoArgoprojWorkflowV1alpha1Link]**](IoArgoprojWorkflowV1alpha1Link.md) | | [optional] +**columns** | [**List[IoArgoprojWorkflowV1alpha1Column]**](IoArgoprojWorkflowV1alpha1Column.md) | | [optional] +**links** | [**List[IoArgoprojWorkflowV1alpha1Link]**](IoArgoprojWorkflowV1alpha1Link.md) | | [optional] **managed_namespace** | **str** | | [optional] -**modals** | **{str: (bool,)}** | | [optional] +**modals** | **Dict[str, bool]** | | [optional] **nav_color** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_info_response import IoArgoprojWorkflowV1alpha1InfoResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1InfoResponse from a JSON string +io_argoproj_workflow_v1alpha1_info_response_instance = IoArgoprojWorkflowV1alpha1InfoResponse.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1InfoResponse.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_info_response_dict = io_argoproj_workflow_v1alpha1_info_response_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1InfoResponse from a dict +io_argoproj_workflow_v1alpha1_info_response_form_dict = 
io_argoproj_workflow_v1alpha1_info_response.from_dict(io_argoproj_workflow_v1alpha1_info_response_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Inputs.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Inputs.md index eb819f1e4ae2..803a6492d1fa 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Inputs.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Inputs.md @@ -3,12 +3,29 @@ Inputs are the mechanism for passing parameters, artifacts, volumes from one template to another ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**artifacts** | [**[IoArgoprojWorkflowV1alpha1Artifact]**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifact are a list of artifacts passed as inputs | [optional] -**parameters** | [**[IoArgoprojWorkflowV1alpha1Parameter]**](IoArgoprojWorkflowV1alpha1Parameter.md) | Parameters are a list of parameters passed as inputs | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**artifacts** | [**List[IoArgoprojWorkflowV1alpha1Artifact]**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifact are a list of artifacts passed as inputs | [optional] +**parameters** | [**List[IoArgoprojWorkflowV1alpha1Parameter]**](IoArgoprojWorkflowV1alpha1Parameter.md) | Parameters are a list of parameters passed as inputs | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_inputs import IoArgoprojWorkflowV1alpha1Inputs + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Inputs from a JSON string +io_argoproj_workflow_v1alpha1_inputs_instance = 
IoArgoprojWorkflowV1alpha1Inputs.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Inputs.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_inputs_dict = io_argoproj_workflow_v1alpha1_inputs_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Inputs from a dict +io_argoproj_workflow_v1alpha1_inputs_form_dict = io_argoproj_workflow_v1alpha1_inputs.from_dict(io_argoproj_workflow_v1alpha1_inputs_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelKeys.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelKeys.md index e2c084e7372f..9d8a9422e8e5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelKeys.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelKeys.md @@ -3,11 +3,28 @@ LabelKeys is list of keys ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | **[str]** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**items** | **List[str]** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_keys import IoArgoprojWorkflowV1alpha1LabelKeys + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1LabelKeys from a JSON string +io_argoproj_workflow_v1alpha1_label_keys_instance = IoArgoprojWorkflowV1alpha1LabelKeys.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1LabelKeys.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_label_keys_dict = 
io_argoproj_workflow_v1alpha1_label_keys_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1LabelKeys from a dict +io_argoproj_workflow_v1alpha1_label_keys_form_dict = io_argoproj_workflow_v1alpha1_label_keys.from_dict(io_argoproj_workflow_v1alpha1_label_keys_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValueFrom.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValueFrom.md index 4b0f105e0509..fca46c96b368 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValueFrom.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValueFrom.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **expression** | **str** | | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_value_from import IoArgoprojWorkflowV1alpha1LabelValueFrom + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1LabelValueFrom from a JSON string +io_argoproj_workflow_v1alpha1_label_value_from_instance = IoArgoprojWorkflowV1alpha1LabelValueFrom.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1LabelValueFrom.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_label_value_from_dict = io_argoproj_workflow_v1alpha1_label_value_from_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1LabelValueFrom from a dict +io_argoproj_workflow_v1alpha1_label_value_from_form_dict = 
io_argoproj_workflow_v1alpha1_label_value_from.from_dict(io_argoproj_workflow_v1alpha1_label_value_from_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValues.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValues.md index 173b3a523003..a4d82eb3ab30 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValues.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LabelValues.md @@ -3,11 +3,28 @@ Labels is list of workflow labels ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | **[str]** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**items** | **List[str]** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_label_values import IoArgoprojWorkflowV1alpha1LabelValues + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1LabelValues from a JSON string +io_argoproj_workflow_v1alpha1_label_values_instance = IoArgoprojWorkflowV1alpha1LabelValues.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1LabelValues.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_label_values_dict = io_argoproj_workflow_v1alpha1_label_values_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1LabelValues from a dict +io_argoproj_workflow_v1alpha1_label_values_form_dict = io_argoproj_workflow_v1alpha1_label_values.from_dict(io_argoproj_workflow_v1alpha1_label_values_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md index b269599dd23b..1449abff174d 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LifecycleHook.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] **expression** | **str** | Expression is a condition expression for when a node will be retried. If it evaluates to false, the node will not be retried and the retry strategy will be ignored | [optional] **template** | **str** | Template is the name of the template to execute by the hook | [optional] **template_ref** | [**IoArgoprojWorkflowV1alpha1TemplateRef**](IoArgoprojWorkflowV1alpha1TemplateRef.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lifecycle_hook import IoArgoprojWorkflowV1alpha1LifecycleHook + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1LifecycleHook from a JSON string +io_argoproj_workflow_v1alpha1_lifecycle_hook_instance = IoArgoprojWorkflowV1alpha1LifecycleHook.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1LifecycleHook.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_lifecycle_hook_dict = io_argoproj_workflow_v1alpha1_lifecycle_hook_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1LifecycleHook from a dict 
+io_argoproj_workflow_v1alpha1_lifecycle_hook_form_dict = io_argoproj_workflow_v1alpha1_lifecycle_hook.from_dict(io_argoproj_workflow_v1alpha1_lifecycle_hook_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Link.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Link.md index d45f0220ef51..f04341ec0e33 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Link.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Link.md @@ -3,13 +3,30 @@ A link to another app. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | The name of the link, E.g. \"Workflow Logs\" or \"Pod Logs\" | **scope** | **str** | \"workflow\", \"pod\", \"pod-logs\", \"event-source-logs\", \"sensor-logs\", \"workflow-list\" or \"chat\" | **url** | **str** | The URL. Can contain \"${metadata.namespace}\", \"${metadata.name}\", \"${status.startedAt}\", \"${status.finishedAt}\" or any other element in workflow yaml, e.g. 
\"${io.argoproj.workflow.v1alpha1.metadata.annotations.userDefinedKey}\" | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_link import IoArgoprojWorkflowV1alpha1Link + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Link from a JSON string +io_argoproj_workflow_v1alpha1_link_instance = IoArgoprojWorkflowV1alpha1Link.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Link.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_link_dict = io_argoproj_workflow_v1alpha1_link_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Link from a dict +io_argoproj_workflow_v1alpha1_link_form_dict = io_argoproj_workflow_v1alpha1_link.from_dict(io_argoproj_workflow_v1alpha1_link_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.md index 3c4f207f8913..0cbc4f28633d 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cron_workflow** | [**IoArgoprojWorkflowV1alpha1CronWorkflow**](IoArgoprojWorkflowV1alpha1CronWorkflow.md) | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must 
be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_lint_cron_workflow_request import IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest from a JSON string +io_argoproj_workflow_v1alpha1_lint_cron_workflow_request_instance = IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_lint_cron_workflow_request_dict = io_argoproj_workflow_v1alpha1_lint_cron_workflow_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1LintCronWorkflowRequest from a dict +io_argoproj_workflow_v1alpha1_lint_cron_workflow_request_form_dict = io_argoproj_workflow_v1alpha1_lint_cron_workflow_request.from_dict(io_argoproj_workflow_v1alpha1_lint_cron_workflow_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LogEntry.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LogEntry.md index 36b1aa86b4ec..b11d322e36f1 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LogEntry.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1LogEntry.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **content** | **str** | | [optional] **pod_name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from 
argo_workflows.models.io_argoproj_workflow_v1alpha1_log_entry import IoArgoprojWorkflowV1alpha1LogEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1LogEntry from a JSON string +io_argoproj_workflow_v1alpha1_log_entry_instance = IoArgoprojWorkflowV1alpha1LogEntry.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1LogEntry.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_log_entry_dict = io_argoproj_workflow_v1alpha1_log_entry_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1LogEntry from a dict +io_argoproj_workflow_v1alpha1_log_entry_form_dict = io_argoproj_workflow_v1alpha1_log_entry.from_dict(io_argoproj_workflow_v1alpha1_log_entry_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md index 8fbb31c80b6c..d3f2100d22b7 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ManifestFrom.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **artifact** | [**IoArgoprojWorkflowV1alpha1Artifact**](IoArgoprojWorkflowV1alpha1Artifact.md) | | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_manifest_from import IoArgoprojWorkflowV1alpha1ManifestFrom + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ManifestFrom from a JSON string 
+io_argoproj_workflow_v1alpha1_manifest_from_instance = IoArgoprojWorkflowV1alpha1ManifestFrom.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ManifestFrom.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_manifest_from_dict = io_argoproj_workflow_v1alpha1_manifest_from_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ManifestFrom from a dict +io_argoproj_workflow_v1alpha1_manifest_from_form_dict = io_argoproj_workflow_v1alpha1_manifest_from.from_dict(io_argoproj_workflow_v1alpha1_manifest_from_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MemoizationStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MemoizationStatus.md index 8a062a7a8585..6aa0485d37cc 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MemoizationStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MemoizationStatus.md @@ -3,13 +3,30 @@ MemoizationStatus is the status of this memoized node ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cache_name** | **str** | Cache is the name of the cache that was used | **hit** | **bool** | Hit indicates whether this node was created from a cache entry | **key** | **str** | Key is the name of the key used for this node's cache | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_memoization_status import IoArgoprojWorkflowV1alpha1MemoizationStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1MemoizationStatus from a 
JSON string +io_argoproj_workflow_v1alpha1_memoization_status_instance = IoArgoprojWorkflowV1alpha1MemoizationStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1MemoizationStatus.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_memoization_status_dict = io_argoproj_workflow_v1alpha1_memoization_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1MemoizationStatus from a dict +io_argoproj_workflow_v1alpha1_memoization_status_form_dict = io_argoproj_workflow_v1alpha1_memoization_status.from_dict(io_argoproj_workflow_v1alpha1_memoization_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Memoize.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Memoize.md index a9a94ba95eac..ab0ed553c3da 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Memoize.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Memoize.md @@ -3,13 +3,30 @@ Memoization enables caching for the Outputs of the template ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cache** | [**IoArgoprojWorkflowV1alpha1Cache**](IoArgoprojWorkflowV1alpha1Cache.md) | | **key** | **str** | Key is the key to use as the caching key | **max_age** | **str** | MaxAge is the maximum age (e.g. \"180s\", \"24h\") of an entry that is still considered valid. If an entry is older than the MaxAge, it will be ignored. 
| -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_memoize import IoArgoprojWorkflowV1alpha1Memoize + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Memoize from a JSON string +io_argoproj_workflow_v1alpha1_memoize_instance = IoArgoprojWorkflowV1alpha1Memoize.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Memoize.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_memoize_dict = io_argoproj_workflow_v1alpha1_memoize_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Memoize from a dict +io_argoproj_workflow_v1alpha1_memoize_form_dict = io_argoproj_workflow_v1alpha1_memoize.from_dict(io_argoproj_workflow_v1alpha1_memoize_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metadata.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metadata.md index 3961e38f9d26..e09d9316bc52 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metadata.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metadata.md @@ -3,12 +3,29 @@ Pod metdata ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**annotations** | **{str: (str,)}** | | [optional] -**labels** | **{str: (str,)}** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**annotations** | **Dict[str, str]** | | [optional] +**labels** | **Dict[str, str]** | | [optional] + +## Example 
+ +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metadata import IoArgoprojWorkflowV1alpha1Metadata + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Metadata from a JSON string +io_argoproj_workflow_v1alpha1_metadata_instance = IoArgoprojWorkflowV1alpha1Metadata.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Metadata.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_metadata_dict = io_argoproj_workflow_v1alpha1_metadata_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Metadata from a dict +io_argoproj_workflow_v1alpha1_metadata_form_dict = io_argoproj_workflow_v1alpha1_metadata.from_dict(io_argoproj_workflow_v1alpha1_metadata_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MetricLabel.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MetricLabel.md index bd58b227b5df..e718513030b5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MetricLabel.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MetricLabel.md @@ -3,12 +3,29 @@ MetricLabel is a single label for a prometheus metric ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | | **value** | **str** | | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metric_label import IoArgoprojWorkflowV1alpha1MetricLabel + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1MetricLabel from a JSON string 
+io_argoproj_workflow_v1alpha1_metric_label_instance = IoArgoprojWorkflowV1alpha1MetricLabel.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1MetricLabel.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_metric_label_dict = io_argoproj_workflow_v1alpha1_metric_label_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1MetricLabel from a dict +io_argoproj_workflow_v1alpha1_metric_label_form_dict = io_argoproj_workflow_v1alpha1_metric_label.from_dict(io_argoproj_workflow_v1alpha1_metric_label_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metrics.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metrics.md index 638ff778a47f..f0b9171ef2b5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metrics.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Metrics.md @@ -3,11 +3,28 @@ Metrics are a list of metrics emitted from a Workflow/Template ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**prometheus** | [**[IoArgoprojWorkflowV1alpha1Prometheus]**](IoArgoprojWorkflowV1alpha1Prometheus.md) | Prometheus is a list of prometheus metrics to be emitted | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**prometheus** | [**List[IoArgoprojWorkflowV1alpha1Prometheus]**](IoArgoprojWorkflowV1alpha1Prometheus.md) | Prometheus is a list of prometheus metrics to be emitted | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_metrics import IoArgoprojWorkflowV1alpha1Metrics + +# TODO update the JSON string below +json = "{}" +# create an instance of 
IoArgoprojWorkflowV1alpha1Metrics from a JSON string +io_argoproj_workflow_v1alpha1_metrics_instance = IoArgoprojWorkflowV1alpha1Metrics.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Metrics.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_metrics_dict = io_argoproj_workflow_v1alpha1_metrics_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Metrics from a dict +io_argoproj_workflow_v1alpha1_metrics_form_dict = io_argoproj_workflow_v1alpha1_metrics.from_dict(io_argoproj_workflow_v1alpha1_metrics_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Mutex.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Mutex.md index 1075ed386944..4d693e5939f5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Mutex.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Mutex.md @@ -3,12 +3,29 @@ Mutex holds Mutex configuration ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | name of the mutex | [optional] **namespace** | **str** | Namespace is the namespace of the mutex, default: [namespace of workflow] | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex import IoArgoprojWorkflowV1alpha1Mutex + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Mutex from a JSON string +io_argoproj_workflow_v1alpha1_mutex_instance = IoArgoprojWorkflowV1alpha1Mutex.from_json(json) +# print the JSON string representation of the object 
+print(IoArgoprojWorkflowV1alpha1Mutex.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_mutex_dict = io_argoproj_workflow_v1alpha1_mutex_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Mutex from a dict +io_argoproj_workflow_v1alpha1_mutex_form_dict = io_argoproj_workflow_v1alpha1_mutex.from_dict(io_argoproj_workflow_v1alpha1_mutex_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexHolding.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexHolding.md index 4d91e3436c10..25c1525a54c8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexHolding.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexHolding.md @@ -3,12 +3,29 @@ MutexHolding describes the mutex and the object which is holding it. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **holder** | **str** | Holder is a reference to the object which holds the Mutex. Holding Scenario: 1. Current workflow's NodeID which is holding the lock. e.g: ${NodeID} Waiting Scenario: 1. Current workflow or other workflow NodeID which is holding the lock. 
e.g: ${WorkflowName}/${NodeID} | [optional] **mutex** | **str** | Reference for the mutex e.g: ${namespace}/mutex/${mutexName} | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex_holding import IoArgoprojWorkflowV1alpha1MutexHolding + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1MutexHolding from a JSON string +io_argoproj_workflow_v1alpha1_mutex_holding_instance = IoArgoprojWorkflowV1alpha1MutexHolding.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1MutexHolding.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_mutex_holding_dict = io_argoproj_workflow_v1alpha1_mutex_holding_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1MutexHolding from a dict +io_argoproj_workflow_v1alpha1_mutex_holding_form_dict = io_argoproj_workflow_v1alpha1_mutex_holding.from_dict(io_argoproj_workflow_v1alpha1_mutex_holding_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexStatus.md index 5af501784bb3..ae54354e8970 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1MutexStatus.md @@ -3,12 +3,29 @@ MutexStatus contains which objects hold mutex locks, and which objects this workflow is waiting on to release locks. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**holding** | [**[IoArgoprojWorkflowV1alpha1MutexHolding]**](IoArgoprojWorkflowV1alpha1MutexHolding.md) | Holding is a list of mutexes and their respective objects that are held by mutex lock for this io.argoproj.workflow.v1alpha1. | [optional] -**waiting** | [**[IoArgoprojWorkflowV1alpha1MutexHolding]**](IoArgoprojWorkflowV1alpha1MutexHolding.md) | Waiting is a list of mutexes and their respective objects this workflow is waiting for. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**holding** | [**List[IoArgoprojWorkflowV1alpha1MutexHolding]**](IoArgoprojWorkflowV1alpha1MutexHolding.md) | Holding is a list of mutexes and their respective objects that are held by mutex lock for this io.argoproj.workflow.v1alpha1. | [optional] +**waiting** | [**List[IoArgoprojWorkflowV1alpha1MutexHolding]**](IoArgoprojWorkflowV1alpha1MutexHolding.md) | Waiting is a list of mutexes and their respective objects this workflow is waiting for. 
| [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_mutex_status import IoArgoprojWorkflowV1alpha1MutexStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1MutexStatus from a JSON string +io_argoproj_workflow_v1alpha1_mutex_status_instance = IoArgoprojWorkflowV1alpha1MutexStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1MutexStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_mutex_status_dict = io_argoproj_workflow_v1alpha1_mutex_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1MutexStatus from a dict +io_argoproj_workflow_v1alpha1_mutex_status_form_dict = io_argoproj_workflow_v1alpha1_mutex_status.from_dict(io_argoproj_workflow_v1alpha1_mutex_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeFlag.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeFlag.md index f9113f86d045..d2c7b1a561fe 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeFlag.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeFlag.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **hooked** | **bool** | Hooked tracks whether or not this node was triggered by hook or onExit | [optional] **retried** | **bool** | Retried tracks whether or not this node was retried by retryStrategy | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_flag import 
IoArgoprojWorkflowV1alpha1NodeFlag + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1NodeFlag from a JSON string +io_argoproj_workflow_v1alpha1_node_flag_instance = IoArgoprojWorkflowV1alpha1NodeFlag.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1NodeFlag.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_node_flag_dict = io_argoproj_workflow_v1alpha1_node_flag_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1NodeFlag from a dict +io_argoproj_workflow_v1alpha1_node_flag_form_dict = io_argoproj_workflow_v1alpha1_node_flag.from_dict(io_argoproj_workflow_v1alpha1_node_flag_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeResult.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeResult.md index 9790ceffb93c..0ad8f57db086 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeResult.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeResult.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **message** | **str** | | [optional] **outputs** | [**IoArgoprojWorkflowV1alpha1Outputs**](IoArgoprojWorkflowV1alpha1Outputs.md) | | [optional] **phase** | **str** | | [optional] **progress** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_result import IoArgoprojWorkflowV1alpha1NodeResult + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1NodeResult 
from a JSON string +io_argoproj_workflow_v1alpha1_node_result_instance = IoArgoprojWorkflowV1alpha1NodeResult.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1NodeResult.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_node_result_dict = io_argoproj_workflow_v1alpha1_node_result_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1NodeResult from a dict +io_argoproj_workflow_v1alpha1_node_result_form_dict = io_argoproj_workflow_v1alpha1_node_result.from_dict(io_argoproj_workflow_v1alpha1_node_result_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeStatus.md index d1c5d7fa3e80..697829878a53 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeStatus.md @@ -3,35 +3,52 @@ NodeStatus contains status information about an individual node in the workflow ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**id** | **str** | ID is a unique identifier of a node within the worklow It is implemented as a hash of the node name, which makes the ID deterministic | -**name** | **str** | Name is unique name in the node tree used to generate the node ID | -**type** | **str** | Type indicates type of node | **boundary_id** | **str** | BoundaryID indicates the node ID of the associated template root node in which this node belongs to | [optional] -**children** | **[str]** | Children is a list of child node IDs | [optional] +**children** | **List[str]** | Children is a list of child node IDs | [optional] **daemoned** | **bool** | Daemoned tracks whether or not this node was daemoned and need 
to be terminated | [optional] **display_name** | **str** | DisplayName is a human readable representation of the node. Unique within a template boundary | [optional] **estimated_duration** | **int** | EstimatedDuration in seconds. | [optional] **finished_at** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **host_node_name** | **str** | HostNodeName name of the Kubernetes node on which the Pod is running, if applicable | [optional] +**id** | **str** | ID is a unique identifier of a node within the worklow It is implemented as a hash of the node name, which makes the ID deterministic | **inputs** | [**IoArgoprojWorkflowV1alpha1Inputs**](IoArgoprojWorkflowV1alpha1Inputs.md) | | [optional] **memoization_status** | [**IoArgoprojWorkflowV1alpha1MemoizationStatus**](IoArgoprojWorkflowV1alpha1MemoizationStatus.md) | | [optional] **message** | **str** | A human readable message indicating details about why the node is in this condition. | [optional] +**name** | **str** | Name is unique name in the node tree used to generate the node ID | **node_flag** | [**IoArgoprojWorkflowV1alpha1NodeFlag**](IoArgoprojWorkflowV1alpha1NodeFlag.md) | | [optional] -**outbound_nodes** | **[str]** | OutboundNodes tracks the node IDs which are considered \"outbound\" nodes to a template invocation. For every invocation of a template, there are nodes which we considered as \"outbound\". Essentially, these are last nodes in the execution sequence to run, before the template is considered completed. These nodes are then connected as parents to a following step. In the case of single pod steps (i.e. container, script, resource templates), this list will be nil since the pod itself is already considered the \"outbound\" node. In the case of DAGs, outbound nodes are the \"target\" tasks (tasks with no children). 
In the case of steps, outbound nodes are all the containers involved in the last step group. NOTE: since templates are composable, the list of outbound nodes are carried upwards when a DAG/steps template invokes another DAG/steps template. In other words, the outbound nodes of a template, will be a superset of the outbound nodes of its last children. | [optional] +**outbound_nodes** | **List[str]** | OutboundNodes tracks the node IDs which are considered \"outbound\" nodes to a template invocation. For every invocation of a template, there are nodes which we considered as \"outbound\". Essentially, these are last nodes in the execution sequence to run, before the template is considered completed. These nodes are then connected as parents to a following step. In the case of single pod steps (i.e. container, script, resource templates), this list will be nil since the pod itself is already considered the \"outbound\" node. In the case of DAGs, outbound nodes are the \"target\" tasks (tasks with no children). In the case of steps, outbound nodes are all the containers involved in the last step group. NOTE: since templates are composable, the list of outbound nodes are carried upwards when a DAG/steps template invokes another DAG/steps template. In other words, the outbound nodes of a template, will be a superset of the outbound nodes of its last children. | [optional] **outputs** | [**IoArgoprojWorkflowV1alpha1Outputs**](IoArgoprojWorkflowV1alpha1Outputs.md) | | [optional] **phase** | **str** | Phase a simple, high-level summary of where the node is in its lifecycle. Can be used as a state machine. Will be one of these values \"Pending\", \"Running\" before the node is completed, or \"Succeeded\", \"Skipped\", \"Failed\", \"Error\", or \"Omitted\" as a final state. 
| [optional] **pod_ip** | **str** | PodIP captures the IP of the pod for daemoned steps | [optional] **progress** | **str** | Progress to completion | [optional] -**resources_duration** | **{str: (int,)}** | ResourcesDuration is indicative, but not accurate, resource duration. This is populated when the nodes completes. | [optional] +**resources_duration** | **Dict[str, int]** | ResourcesDuration is indicative, but not accurate, resource duration. This is populated when the nodes completes. | [optional] **started_at** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **synchronization_status** | [**IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus**](IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.md) | | [optional] **template_name** | **str** | TemplateName is the template name which this node corresponds to. Not applicable to virtual nodes (e.g. Retry, StepGroup) | [optional] **template_ref** | [**IoArgoprojWorkflowV1alpha1TemplateRef**](IoArgoprojWorkflowV1alpha1TemplateRef.md) | | [optional] **template_scope** | **str** | TemplateScope is the template scope in which the template of this node was retrieved. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**type** | **str** | Type indicates type of node | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_status import IoArgoprojWorkflowV1alpha1NodeStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1NodeStatus from a JSON string +io_argoproj_workflow_v1alpha1_node_status_instance = IoArgoprojWorkflowV1alpha1NodeStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1NodeStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_node_status_dict = io_argoproj_workflow_v1alpha1_node_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1NodeStatus from a dict +io_argoproj_workflow_v1alpha1_node_status_form_dict = io_argoproj_workflow_v1alpha1_node_status.from_dict(io_argoproj_workflow_v1alpha1_node_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.md index 0d9a4fb1b3fb..6318469f2732 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.md @@ -3,11 +3,28 @@ NodeSynchronizationStatus stores the status of a node ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **waiting** | **str** | Waiting is the name of the lock that this node is waiting for | [optional] -**any string name** | **bool, date, datetime, dict, float, int, 
list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_node_synchronization_status import IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus from a JSON string +io_argoproj_workflow_v1alpha1_node_synchronization_status_instance = IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_node_synchronization_status_dict = io_argoproj_workflow_v1alpha1_node_synchronization_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus from a dict +io_argoproj_workflow_v1alpha1_node_synchronization_status_form_dict = io_argoproj_workflow_v1alpha1_node_synchronization_status.from_dict(io_argoproj_workflow_v1alpha1_node_synchronization_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md index 094f934aeb28..d4ba3f08fe64 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2Auth.md @@ -3,15 +3,32 @@ OAuth2Auth holds all information for client authentication via OAuth2 tokens ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **client_id_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **client_secret_secret** | 
[**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**endpoint_params** | [**[IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]**](IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md) | | [optional] -**scopes** | **[str]** | | [optional] +**endpoint_params** | [**List[IoArgoprojWorkflowV1alpha1OAuth2EndpointParam]**](IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md) | | [optional] +**scopes** | **List[str]** | | [optional] **token_url_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_auth import IoArgoprojWorkflowV1alpha1OAuth2Auth + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1OAuth2Auth from a JSON string +io_argoproj_workflow_v1alpha1_o_auth2_auth_instance = IoArgoprojWorkflowV1alpha1OAuth2Auth.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1OAuth2Auth.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_o_auth2_auth_dict = io_argoproj_workflow_v1alpha1_o_auth2_auth_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1OAuth2Auth from a dict +io_argoproj_workflow_v1alpha1_o_auth2_auth_form_dict = io_argoproj_workflow_v1alpha1_o_auth2_auth.from_dict(io_argoproj_workflow_v1alpha1_o_auth2_auth_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md index 8b30d7b2619e..4ceb1674d8d4 100644 --- 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.md @@ -3,12 +3,29 @@ EndpointParam is for requesting optional fields that should be sent in the oauth request ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | Name is the header name | **value** | **str** | Value is the literal value to use for the header | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param import IoArgoprojWorkflowV1alpha1OAuth2EndpointParam + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1OAuth2EndpointParam from a JSON string +io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param_instance = IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1OAuth2EndpointParam.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param_dict = io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1OAuth2EndpointParam from a dict +io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param_form_dict = io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param.from_dict(io_argoproj_workflow_v1alpha1_o_auth2_endpoint_param_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifact.md index 
9c0eeb6eca10..94b6414c46e5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifact.md @@ -3,19 +3,36 @@ OSSArtifact is the location of an Alibaba Cloud OSS artifact ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**key** | **str** | Key is the path in the bucket where the artifact resides | **access_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **bucket** | **str** | Bucket is the name of the bucket | [optional] **create_bucket_if_not_present** | **bool** | CreateBucketIfNotPresent tells the driver to attempt to create the OSS bucket for output artifacts, if it doesn't exist | [optional] **endpoint** | **str** | Endpoint is the hostname of the bucket endpoint | [optional] +**key** | **str** | Key is the path in the bucket where the artifact resides | **lifecycle_rule** | [**IoArgoprojWorkflowV1alpha1OSSLifecycleRule**](IoArgoprojWorkflowV1alpha1OSSLifecycleRule.md) | | [optional] **secret_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **security_token** | **str** | SecurityToken is the user's temporary security token. For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm | [optional] **use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact import IoArgoprojWorkflowV1alpha1OSSArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1OSSArtifact from a JSON string +io_argoproj_workflow_v1alpha1_oss_artifact_instance = IoArgoprojWorkflowV1alpha1OSSArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1OSSArtifact.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_oss_artifact_dict = io_argoproj_workflow_v1alpha1_oss_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1OSSArtifact from a dict +io_argoproj_workflow_v1alpha1_oss_artifact_form_dict = io_argoproj_workflow_v1alpha1_oss_artifact.from_dict(io_argoproj_workflow_v1alpha1_oss_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifactRepository.md index 2d628a8fa2b5..611917b5ee36 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifactRepository.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSArtifactRepository.md @@ -3,6 +3,7 @@ OSSArtifactRepository defines the controller configuration for an OSS artifact repository ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -14,8 +15,24 @@ Name | Type | Description | Notes **secret_key_secret** | 
[**SecretKeySelector**](SecretKeySelector.md) | | [optional] **security_token** | **str** | SecurityToken is the user's temporary security token. For more details, check out: https://www.alibabacloud.com/help/doc-detail/100624.htm | [optional] **use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_artifact_repository import IoArgoprojWorkflowV1alpha1OSSArtifactRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1OSSArtifactRepository from a JSON string +io_argoproj_workflow_v1alpha1_oss_artifact_repository_instance = IoArgoprojWorkflowV1alpha1OSSArtifactRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1OSSArtifactRepository.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_oss_artifact_repository_dict = io_argoproj_workflow_v1alpha1_oss_artifact_repository_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1OSSArtifactRepository from a dict +io_argoproj_workflow_v1alpha1_oss_artifact_repository_form_dict = io_argoproj_workflow_v1alpha1_oss_artifact_repository.from_dict(io_argoproj_workflow_v1alpha1_oss_artifact_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSLifecycleRule.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSLifecycleRule.md index 7682231daf5d..573a15ac2cd0 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSLifecycleRule.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1OSSLifecycleRule.md @@ -3,12 +3,29 @@ OSSLifecycleRule specifies how to manage bucket's lifecycle ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **mark_deletion_after_days** | **int** | MarkDeletionAfterDays is the number of days before we delete objects in the bucket | [optional] **mark_infrequent_access_after_days** | **int** | MarkInfrequentAccessAfterDays is the number of days before we convert the objects in the bucket to Infrequent Access (IA) storage type | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_oss_lifecycle_rule import IoArgoprojWorkflowV1alpha1OSSLifecycleRule + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1OSSLifecycleRule from a JSON string +io_argoproj_workflow_v1alpha1_oss_lifecycle_rule_instance = IoArgoprojWorkflowV1alpha1OSSLifecycleRule.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1OSSLifecycleRule.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_oss_lifecycle_rule_dict = io_argoproj_workflow_v1alpha1_oss_lifecycle_rule_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1OSSLifecycleRule from a dict +io_argoproj_workflow_v1alpha1_oss_lifecycle_rule_form_dict = io_argoproj_workflow_v1alpha1_oss_lifecycle_rule.from_dict(io_argoproj_workflow_v1alpha1_oss_lifecycle_rule_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Outputs.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Outputs.md index ecbd149b5da5..e63089ac5395 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Outputs.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Outputs.md @@ -3,14 +3,31 @@ Outputs hold parameters, artifacts, and results from a step ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**artifacts** | [**[IoArgoprojWorkflowV1alpha1Artifact]**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts holds the list of output artifacts produced by a step | [optional] +**artifacts** | [**List[IoArgoprojWorkflowV1alpha1Artifact]**](IoArgoprojWorkflowV1alpha1Artifact.md) | Artifacts holds the list of output artifacts produced by a step | [optional] **exit_code** | **str** | ExitCode holds the exit code of a script template | [optional] -**parameters** | [**[IoArgoprojWorkflowV1alpha1Parameter]**](IoArgoprojWorkflowV1alpha1Parameter.md) | Parameters holds the list of output parameters produced by a step | [optional] +**parameters** | [**List[IoArgoprojWorkflowV1alpha1Parameter]**](IoArgoprojWorkflowV1alpha1Parameter.md) | Parameters holds the list of output parameters produced by a step | [optional] **result** | **str** | Result holds the result (stdout) of a script template | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_outputs import IoArgoprojWorkflowV1alpha1Outputs + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Outputs from a JSON string +io_argoproj_workflow_v1alpha1_outputs_instance = IoArgoprojWorkflowV1alpha1Outputs.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Outputs.to_json()) + +# convert the object 
into a dict +io_argoproj_workflow_v1alpha1_outputs_dict = io_argoproj_workflow_v1alpha1_outputs_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Outputs from a dict +io_argoproj_workflow_v1alpha1_outputs_form_dict = io_argoproj_workflow_v1alpha1_outputs.from_dict(io_argoproj_workflow_v1alpha1_outputs_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ParallelSteps.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ParallelSteps.md index be81e14f96f6..5ee986241494 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ParallelSteps.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ParallelSteps.md @@ -2,10 +2,27 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**value** | [**[IoArgoprojWorkflowV1alpha1WorkflowStep]**](IoArgoprojWorkflowV1alpha1WorkflowStep.md) | | +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parallel_steps import IoArgoprojWorkflowV1alpha1ParallelSteps + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ParallelSteps from a JSON string +io_argoproj_workflow_v1alpha1_parallel_steps_instance = IoArgoprojWorkflowV1alpha1ParallelSteps.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ParallelSteps.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_parallel_steps_dict = io_argoproj_workflow_v1alpha1_parallel_steps_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ParallelSteps from a dict +io_argoproj_workflow_v1alpha1_parallel_steps_form_dict = io_argoproj_workflow_v1alpha1_parallel_steps.from_dict(io_argoproj_workflow_v1alpha1_parallel_steps_dict) +``` [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Parameter.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Parameter.md index 11eae6158683..0f758453a7d7 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Parameter.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Parameter.md @@ -3,17 +3,34 @@ Parameter indicate a passed string parameter to a service template with an optional default value ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | Name is the parameter name | **default** | **str** | Default is the default value to use for an input parameter if a value was not supplied | [optional] **description** | **str** | Description is the parameter description | [optional] -**enum** | **[str]** | Enum holds a list of string values to choose from, for the actual value of the parameter | [optional] +**enum** | **List[str]** | Enum holds a list of string values to choose from, for the actual value of the parameter | [optional] **global_name** | **str** | GlobalName exports an output parameter to the global scope, making it available as '{{io.argoproj.workflow.v1alpha1.outputs.parameters.XXXX}} and in workflow.status.outputs.parameters | [optional] +**name** | **str** | Name is the parameter name | **value** | **str** | Value is the literal value to use for the parameter. 
If specified in the context of an input parameter, the value takes precedence over any passed values | [optional] **value_from** | [**IoArgoprojWorkflowV1alpha1ValueFrom**](IoArgoprojWorkflowV1alpha1ValueFrom.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_parameter import IoArgoprojWorkflowV1alpha1Parameter + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Parameter from a JSON string +io_argoproj_workflow_v1alpha1_parameter_instance = IoArgoprojWorkflowV1alpha1Parameter.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Parameter.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_parameter_dict = io_argoproj_workflow_v1alpha1_parameter_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Parameter from a dict +io_argoproj_workflow_v1alpha1_parameter_form_dict = io_argoproj_workflow_v1alpha1_parameter.from_dict(io_argoproj_workflow_v1alpha1_parameter_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1PodGC.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1PodGC.md index 5d30ba94296a..a2b3f6f25f1a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1PodGC.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1PodGC.md @@ -3,13 +3,30 @@ PodGC describes how to delete completed pods as they complete ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **delete_delay_duration** | [**Duration**](Duration.md) | | [optional] **label_selector** | 
[**LabelSelector**](LabelSelector.md) | | [optional] **strategy** | **str** | Strategy is the strategy to use. One of \"OnPodCompletion\", \"OnPodSuccess\", \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". If unset, does not delete Pods | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_pod_gc import IoArgoprojWorkflowV1alpha1PodGC + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1PodGC from a JSON string +io_argoproj_workflow_v1alpha1_pod_gc_instance = IoArgoprojWorkflowV1alpha1PodGC.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1PodGC.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_pod_gc_dict = io_argoproj_workflow_v1alpha1_pod_gc_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1PodGC from a dict +io_argoproj_workflow_v1alpha1_pod_gc_form_dict = io_argoproj_workflow_v1alpha1_pod_gc.from_dict(io_argoproj_workflow_v1alpha1_pod_gc_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Prometheus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Prometheus.md index a3ac68561cfb..53176c89aff5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Prometheus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Prometheus.md @@ -3,17 +3,34 @@ Prometheus is a prometheus metric to be emitted ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**help** | **str** | Help is a string that describes the metric | -**name** | **str** | Name is the name of 
the metric | **counter** | [**IoArgoprojWorkflowV1alpha1Counter**](IoArgoprojWorkflowV1alpha1Counter.md) | | [optional] **gauge** | [**IoArgoprojWorkflowV1alpha1Gauge**](IoArgoprojWorkflowV1alpha1Gauge.md) | | [optional] +**help** | **str** | Help is a string that describes the metric | **histogram** | [**IoArgoprojWorkflowV1alpha1Histogram**](IoArgoprojWorkflowV1alpha1Histogram.md) | | [optional] -**labels** | [**[IoArgoprojWorkflowV1alpha1MetricLabel]**](IoArgoprojWorkflowV1alpha1MetricLabel.md) | Labels is a list of metric labels | [optional] +**labels** | [**List[IoArgoprojWorkflowV1alpha1MetricLabel]**](IoArgoprojWorkflowV1alpha1MetricLabel.md) | Labels is a list of metric labels | [optional] +**name** | **str** | Name is the name of the metric | **when** | **str** | When is a conditional statement that decides when to emit the metric | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_prometheus import IoArgoprojWorkflowV1alpha1Prometheus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Prometheus from a JSON string +io_argoproj_workflow_v1alpha1_prometheus_instance = IoArgoprojWorkflowV1alpha1Prometheus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Prometheus.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_prometheus_dict = io_argoproj_workflow_v1alpha1_prometheus_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Prometheus from a dict +io_argoproj_workflow_v1alpha1_prometheus_form_dict = io_argoproj_workflow_v1alpha1_prometheus.from_dict(io_argoproj_workflow_v1alpha1_prometheus_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RawArtifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RawArtifact.md index fb54c3943ae1..a9a7a408d2ac 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RawArtifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RawArtifact.md @@ -3,11 +3,28 @@ RawArtifact allows raw string content to be placed as an artifact in a container ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **data** | **str** | Data is the string contents of the artifact | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_raw_artifact import IoArgoprojWorkflowV1alpha1RawArtifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1RawArtifact from a JSON string +io_argoproj_workflow_v1alpha1_raw_artifact_instance = IoArgoprojWorkflowV1alpha1RawArtifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1RawArtifact.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_raw_artifact_dict = io_argoproj_workflow_v1alpha1_raw_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1RawArtifact from a dict +io_argoproj_workflow_v1alpha1_raw_artifact_form_dict = io_argoproj_workflow_v1alpha1_raw_artifact.from_dict(io_argoproj_workflow_v1alpha1_raw_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md index 0d6badfbf855..f948aa7bd1c7 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResourceTemplate.md @@ -3,18 +3,35 @@ ResourceTemplate is a template subtype to manipulate kubernetes resources ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **action** | **str** | Action is the action to perform to the resource. Must be one of: get, create, apply, delete, replace, patch | **failure_condition** | **str** | FailureCondition is a label selector expression which describes the conditions of the k8s resource in which the step was considered failed | [optional] -**flags** | **[str]** | Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ] | [optional] +**flags** | **List[str]** | Flags is a set of additional options passed to kubectl before submitting a resource I.e. to disable resource validation: flags: [ \"--validate=false\" # disable resource validation ] | [optional] **manifest** | **str** | Manifest contains the kubernetes manifest | [optional] **manifest_from** | [**IoArgoprojWorkflowV1alpha1ManifestFrom**](IoArgoprojWorkflowV1alpha1ManifestFrom.md) | | [optional] **merge_strategy** | **str** | MergeStrategy is the strategy used to merge a patch. It defaults to \"strategic\" Must be one of: strategic, merge, json | [optional] **set_owner_reference** | **bool** | SetOwnerReference sets the reference to the workflow on the OwnerReference of generated resource. 
| [optional] **success_condition** | **str** | SuccessCondition is a label selector expression which describes the conditions of the k8s resource in which it is acceptable to proceed to the following step | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resource_template import IoArgoprojWorkflowV1alpha1ResourceTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ResourceTemplate from a JSON string +io_argoproj_workflow_v1alpha1_resource_template_instance = IoArgoprojWorkflowV1alpha1ResourceTemplate.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ResourceTemplate.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_resource_template_dict = io_argoproj_workflow_v1alpha1_resource_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ResourceTemplate from a dict +io_argoproj_workflow_v1alpha1_resource_template_form_dict = io_argoproj_workflow_v1alpha1_resource_template.from_dict(io_argoproj_workflow_v1alpha1_resource_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md index f27f935a3f4c..e6dd430f6119 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | 
------------- | ------------- **memoized** | **bool** | | [optional] **name** | **str** | | [optional] **namespace** | **str** | | [optional] -**parameters** | **[str]** | | [optional] +**parameters** | **List[str]** | | [optional] **uid** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request import IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest from a JSON string +io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request_instance = IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request_dict = io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ResubmitArchivedWorkflowRequest from a dict +io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request_form_dict = io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request.from_dict(io_argoproj_workflow_v1alpha1_resubmit_archived_workflow_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryAffinity.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryAffinity.md index cda9b060f940..ab220671a244 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryAffinity.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryAffinity.md @@ -3,11 +3,28 @@ RetryAffinity prevents running steps on the same host. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**node_anti_affinity** | **bool, date, datetime, dict, float, int, list, str, none_type** | RetryNodeAntiAffinity is a placeholder for future expansion, only empty nodeAntiAffinity is allowed. In order to prevent running steps on the same host, it uses \"kubernetes.io/hostname\". | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**node_anti_affinity** | **object** | RetryNodeAntiAffinity is a placeholder for future expansion, only empty nodeAntiAffinity is allowed. In order to prevent running steps on the same host, it uses \"kubernetes.io/hostname\". | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_affinity import IoArgoprojWorkflowV1alpha1RetryAffinity + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1RetryAffinity from a JSON string +io_argoproj_workflow_v1alpha1_retry_affinity_instance = IoArgoprojWorkflowV1alpha1RetryAffinity.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1RetryAffinity.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_retry_affinity_dict = io_argoproj_workflow_v1alpha1_retry_affinity_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1RetryAffinity from a dict +io_argoproj_workflow_v1alpha1_retry_affinity_form_dict = io_argoproj_workflow_v1alpha1_retry_affinity.from_dict(io_argoproj_workflow_v1alpha1_retry_affinity_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back 
to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md index cdbf4e08e85f..80016f07e04c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.md @@ -2,16 +2,33 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] **node_field_selector** | **str** | | [optional] -**parameters** | **[str]** | | [optional] +**parameters** | **List[str]** | | [optional] **restart_successful** | **bool** | | [optional] **uid** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_archived_workflow_request import IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest from a JSON string +io_argoproj_workflow_v1alpha1_retry_archived_workflow_request_instance = IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_retry_archived_workflow_request_dict = io_argoproj_workflow_v1alpha1_retry_archived_workflow_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1RetryArchivedWorkflowRequest from a dict +io_argoproj_workflow_v1alpha1_retry_archived_workflow_request_form_dict = 
io_argoproj_workflow_v1alpha1_retry_archived_workflow_request.from_dict(io_argoproj_workflow_v1alpha1_retry_archived_workflow_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryStrategy.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryStrategy.md index 77d434c1cf3d..34c5f1dd9d58 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryStrategy.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1RetryStrategy.md @@ -3,6 +3,7 @@ RetryStrategy provides controls on how to retry a workflow step ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **affinity** | [**IoArgoprojWorkflowV1alpha1RetryAffinity**](IoArgoprojWorkflowV1alpha1RetryAffinity.md) | | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **expression** | **str** | Expression is a condition expression for when a node will be retried. 
If it evaluates to false, the node will not be retried and the retry strategy will be ignored | [optional] **limit** | **str** | | [optional] **retry_policy** | **str** | RetryPolicy is a policy of NodePhase statuses that will be retried | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_retry_strategy import IoArgoprojWorkflowV1alpha1RetryStrategy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1RetryStrategy from a JSON string +io_argoproj_workflow_v1alpha1_retry_strategy_instance = IoArgoprojWorkflowV1alpha1RetryStrategy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1RetryStrategy.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_retry_strategy_dict = io_argoproj_workflow_v1alpha1_retry_strategy_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1RetryStrategy from a dict +io_argoproj_workflow_v1alpha1_retry_strategy_form_dict = io_argoproj_workflow_v1alpha1_retry_strategy.from_dict(io_argoproj_workflow_v1alpha1_retry_strategy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3Artifact.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3Artifact.md index ef36d6a0acb0..df0bbaac4676 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3Artifact.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3Artifact.md @@ -3,6 +3,7 @@ S3Artifact is the location of an S3 artifact ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- 
**access_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -17,8 +18,24 @@ Name | Type | Description | Notes **role_arn** | **str** | RoleARN is the Amazon Resource Name (ARN) of the role to assume. | [optional] **secret_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact import IoArgoprojWorkflowV1alpha1S3Artifact + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1S3Artifact from a JSON string +io_argoproj_workflow_v1alpha1_s3_artifact_instance = IoArgoprojWorkflowV1alpha1S3Artifact.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1S3Artifact.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_s3_artifact_dict = io_argoproj_workflow_v1alpha1_s3_artifact_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1S3Artifact from a dict +io_argoproj_workflow_v1alpha1_s3_artifact_form_dict = io_argoproj_workflow_v1alpha1_s3_artifact.from_dict(io_argoproj_workflow_v1alpha1_s3_artifact_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3ArtifactRepository.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3ArtifactRepository.md index 1e46072155e5..687bcab440da 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3ArtifactRepository.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3ArtifactRepository.md @@ -3,6 +3,7 @@ S3ArtifactRepository defines the controller configuration for an S3 artifact repository ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] @@ -18,8 +19,24 @@ Name | Type | Description | Notes **role_arn** | **str** | RoleARN is the Amazon Resource Name (ARN) of the role to assume. | [optional] **secret_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **use_sdk_creds** | **bool** | UseSDKCreds tells the driver to figure out credentials based on sdk defaults. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_artifact_repository import IoArgoprojWorkflowV1alpha1S3ArtifactRepository + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1S3ArtifactRepository from a JSON string +io_argoproj_workflow_v1alpha1_s3_artifact_repository_instance = IoArgoprojWorkflowV1alpha1S3ArtifactRepository.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1S3ArtifactRepository.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_s3_artifact_repository_dict = io_argoproj_workflow_v1alpha1_s3_artifact_repository_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1S3ArtifactRepository from a dict +io_argoproj_workflow_v1alpha1_s3_artifact_repository_form_dict = io_argoproj_workflow_v1alpha1_s3_artifact_repository.from_dict(io_argoproj_workflow_v1alpha1_s3_artifact_repository_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3EncryptionOptions.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3EncryptionOptions.md index 4be54c3ce72d..ca8f761ae26b 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3EncryptionOptions.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1S3EncryptionOptions.md @@ -3,14 +3,31 @@ S3EncryptionOptions used to determine encryption options during s3 operations ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **enable_encryption** | **bool** | EnableEncryption tells the driver to encrypt objects if set to true. If kmsKeyId and serverSideCustomerKeySecret are not set, SSE-S3 will be used | [optional] **kms_encryption_context** | **str** | KmsEncryptionContext is a json blob that contains an encryption context. See https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context for more information | [optional] **kms_key_id** | **str** | KMSKeyId tells the driver to encrypt the object using the specified KMS Key. 
| [optional] **server_side_customer_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_s3_encryption_options import IoArgoprojWorkflowV1alpha1S3EncryptionOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1S3EncryptionOptions from a JSON string +io_argoproj_workflow_v1alpha1_s3_encryption_options_instance = IoArgoprojWorkflowV1alpha1S3EncryptionOptions.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1S3EncryptionOptions.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_s3_encryption_options_dict = io_argoproj_workflow_v1alpha1_s3_encryption_options_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1S3EncryptionOptions from a dict +io_argoproj_workflow_v1alpha1_s3_encryption_options_form_dict = io_argoproj_workflow_v1alpha1_s3_encryption_options.from_dict(io_argoproj_workflow_v1alpha1_s3_encryption_options_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md index 9a96612ef976..eba83744eb06 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ScriptTemplate.md @@ -3,33 +3,50 @@ ScriptTemplate is a template subtype to enable scripting through code steps ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**args** | **List[str]** | 
Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **List[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**env** | [**List[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] +**env_from** | [**List[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. 
| [optional] **image** | **str** | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | -**source** | **str** | Source contains the source code of the script to execute | -**args** | **[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**env** | [**[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. 
| [optional] -**env_from** | [**[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] **image_pull_policy** | **str** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**Lifecycle**](Lifecycle.md) | | [optional] **liveness_probe** | [**Probe**](Probe.md) | | [optional] **name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | [optional] -**ports** | [**[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated. | [optional] +**ports** | [**List[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated. 
| [optional] **readiness_probe** | [**Probe**](Probe.md) | | [optional] **resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] **security_context** | [**SecurityContext**](SecurityContext.md) | | [optional] +**source** | **str** | Source contains the source code of the script to execute | **startup_probe** | [**Probe**](Probe.md) | | [optional] **stdin** | **bool** | Whether this container should allocate a buffer for stdin in the container runtime. If this is not set, reads from stdin in the container will always result in EOF. Default is false. | [optional] **stdin_once** | **bool** | Whether the container runtime should close the stdin channel after it has been opened by a single attach. When stdin is true the stdin stream will remain open across multiple attach sessions. If stdinOnce is set to true, stdin is opened on container start, is empty until the first client attaches to stdin, and then remains open and accepts data until the client disconnects, at which time stdin is closed and remains closed until the container is restarted. If this flag is false, a container processes that reads from stdin will never receive an EOF. Default is false | [optional] **termination_message_path** | **str** | Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated. | [optional] **termination_message_policy** | **str** | Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. 
FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated. | [optional] **tty** | **bool** | Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. Default is false. | [optional] -**volume_devices** | [**[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. | [optional] -**volume_mounts** | [**[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] +**volume_devices** | [**List[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. | [optional] +**volume_mounts** | [**List[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] **working_dir** | **str** | Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_script_template import IoArgoprojWorkflowV1alpha1ScriptTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ScriptTemplate from a JSON string +io_argoproj_workflow_v1alpha1_script_template_instance = IoArgoprojWorkflowV1alpha1ScriptTemplate.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ScriptTemplate.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_script_template_dict = io_argoproj_workflow_v1alpha1_script_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ScriptTemplate from a dict +io_argoproj_workflow_v1alpha1_script_template_form_dict = io_argoproj_workflow_v1alpha1_script_template.from_dict(io_argoproj_workflow_v1alpha1_script_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreHolding.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreHolding.md index 355b0f083713..1abc1776d372 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreHolding.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreHolding.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**holders** | **[str]** | Holders stores the list of current holder names in the io.argoproj.workflow.v1alpha1. | [optional] +**holders** | **List[str]** | Holders stores the list of current holder names in the io.argoproj.workflow.v1alpha1. 
| [optional] **semaphore** | **str** | Semaphore stores the semaphore name. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_holding import IoArgoprojWorkflowV1alpha1SemaphoreHolding + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1SemaphoreHolding from a JSON string +io_argoproj_workflow_v1alpha1_semaphore_holding_instance = IoArgoprojWorkflowV1alpha1SemaphoreHolding.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1SemaphoreHolding.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_semaphore_holding_dict = io_argoproj_workflow_v1alpha1_semaphore_holding_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1SemaphoreHolding from a dict +io_argoproj_workflow_v1alpha1_semaphore_holding_form_dict = io_argoproj_workflow_v1alpha1_semaphore_holding.from_dict(io_argoproj_workflow_v1alpha1_semaphore_holding_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreRef.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreRef.md index 86661369b3ab..f331221abc2f 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreRef.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreRef.md @@ -3,12 +3,29 @@ SemaphoreRef is a reference of Semaphore ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map_key_ref** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] **namespace** | **str** | Namespace is the 
namespace of the configmap, default: [namespace of workflow] | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_ref import IoArgoprojWorkflowV1alpha1SemaphoreRef + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1SemaphoreRef from a JSON string +io_argoproj_workflow_v1alpha1_semaphore_ref_instance = IoArgoprojWorkflowV1alpha1SemaphoreRef.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1SemaphoreRef.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_semaphore_ref_dict = io_argoproj_workflow_v1alpha1_semaphore_ref_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1SemaphoreRef from a dict +io_argoproj_workflow_v1alpha1_semaphore_ref_form_dict = io_argoproj_workflow_v1alpha1_semaphore_ref.from_dict(io_argoproj_workflow_v1alpha1_semaphore_ref_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreStatus.md index 957f1dc37392..ae3218fe0ea5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SemaphoreStatus.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**holding** | [**[IoArgoprojWorkflowV1alpha1SemaphoreHolding]**](IoArgoprojWorkflowV1alpha1SemaphoreHolding.md) | Holding stores the list of resource acquired synchronization lock for workflows. 
| [optional] -**waiting** | [**[IoArgoprojWorkflowV1alpha1SemaphoreHolding]**](IoArgoprojWorkflowV1alpha1SemaphoreHolding.md) | Waiting indicates the list of current synchronization lock holders. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**holding** | [**List[IoArgoprojWorkflowV1alpha1SemaphoreHolding]**](IoArgoprojWorkflowV1alpha1SemaphoreHolding.md) | Holding stores the list of resource acquired synchronization lock for workflows. | [optional] +**waiting** | [**List[IoArgoprojWorkflowV1alpha1SemaphoreHolding]**](IoArgoprojWorkflowV1alpha1SemaphoreHolding.md) | Waiting indicates the list of current synchronization lock holders. | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_semaphore_status import IoArgoprojWorkflowV1alpha1SemaphoreStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1SemaphoreStatus from a JSON string +io_argoproj_workflow_v1alpha1_semaphore_status_instance = IoArgoprojWorkflowV1alpha1SemaphoreStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1SemaphoreStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_semaphore_status_dict = io_argoproj_workflow_v1alpha1_semaphore_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1SemaphoreStatus from a dict +io_argoproj_workflow_v1alpha1_semaphore_status_form_dict = io_argoproj_workflow_v1alpha1_semaphore_status.from_dict(io_argoproj_workflow_v1alpha1_semaphore_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Sequence.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Sequence.md index a0d8f381f3b3..48ce60b02815 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Sequence.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Sequence.md @@ -3,14 +3,31 @@ Sequence expands a workflow step into numeric range ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **count** | **str** | | [optional] **end** | **str** | | [optional] **format** | **str** | Format is a printf format string to format the value in the sequence | [optional] **start** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_sequence import IoArgoprojWorkflowV1alpha1Sequence + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Sequence from a JSON string +io_argoproj_workflow_v1alpha1_sequence_instance = IoArgoprojWorkflowV1alpha1Sequence.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Sequence.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_sequence_dict = io_argoproj_workflow_v1alpha1_sequence_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Sequence from a dict +io_argoproj_workflow_v1alpha1_sequence_form_dict = io_argoproj_workflow_v1alpha1_sequence.from_dict(io_argoproj_workflow_v1alpha1_sequence_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1StopStrategy.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1StopStrategy.md index e1bd33760579..cc482e90afbc 100644 --- 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1StopStrategy.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1StopStrategy.md @@ -3,11 +3,28 @@ StopStrategy defines if the cron workflow will stop being triggered once a certain condition has been reached, involving a number of runs of the workflow ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **condition** | **str** | Condition defines a condition that stops scheduling workflows when evaluates to true. Use the keywords `failed` or `succeeded` to access the number of failed or successful child workflows. | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_stop_strategy import IoArgoprojWorkflowV1alpha1StopStrategy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1StopStrategy from a JSON string +io_argoproj_workflow_v1alpha1_stop_strategy_instance = IoArgoprojWorkflowV1alpha1StopStrategy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1StopStrategy.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_stop_strategy_dict = io_argoproj_workflow_v1alpha1_stop_strategy_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1StopStrategy from a dict +io_argoproj_workflow_v1alpha1_stop_strategy_form_dict = io_argoproj_workflow_v1alpha1_stop_strategy.from_dict(io_argoproj_workflow_v1alpha1_stop_strategy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Submit.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Submit.md 
index fc267c2edfe0..9127753f56e2 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Submit.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Submit.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**workflow_template_ref** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateRef**](IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.md) | | **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] **metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**workflow_template_ref** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateRef**](IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit import IoArgoprojWorkflowV1alpha1Submit + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Submit from a JSON string +io_argoproj_workflow_v1alpha1_submit_instance = IoArgoprojWorkflowV1alpha1Submit.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Submit.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_submit_dict = io_argoproj_workflow_v1alpha1_submit_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Submit from a dict +io_argoproj_workflow_v1alpha1_submit_form_dict = io_argoproj_workflow_v1alpha1_submit.from_dict(io_argoproj_workflow_v1alpha1_submit_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SubmitOpts.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SubmitOpts.md index 5dd5b86e1199..8d1b2646a5c1 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SubmitOpts.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SubmitOpts.md @@ -3,6 +3,7 @@ SubmitOpts are workflow submission options ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **annotations** | **str** | Annotations adds to metadata.labels | [optional] @@ -12,13 +13,29 @@ Name | Type | Description | Notes **labels** | **str** | Labels adds to metadata.labels | [optional] **name** | **str** | Name overrides metadata.name | [optional] **owner_reference** | [**OwnerReference**](OwnerReference.md) | | [optional] -**parameters** | **[str]** | Parameters passes input parameters to workflow | [optional] +**parameters** | **List[str]** | Parameters passes input parameters to workflow | [optional] **pod_priority_class_name** | **str** | Set the podPriorityClassName of the workflow | [optional] **priority** | **int** | Priority is used if controller is configured to process limited number of workflows in parallel, higher priority workflows are processed first. | [optional] **server_dry_run** | **bool** | ServerDryRun validates the workflow on the server-side without creating it | [optional] **service_account** | **str** | ServiceAccount runs all pods in the workflow using specified ServiceAccount. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_submit_opts import IoArgoprojWorkflowV1alpha1SubmitOpts + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1SubmitOpts from a JSON string +io_argoproj_workflow_v1alpha1_submit_opts_instance = IoArgoprojWorkflowV1alpha1SubmitOpts.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1SubmitOpts.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_submit_opts_dict = io_argoproj_workflow_v1alpha1_submit_opts_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1SubmitOpts from a dict +io_argoproj_workflow_v1alpha1_submit_opts_form_dict = io_argoproj_workflow_v1alpha1_submit_opts.from_dict(io_argoproj_workflow_v1alpha1_submit_opts_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SuspendTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SuspendTemplate.md index cc84b5b23e71..b5b65e32596a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SuspendTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SuspendTemplate.md @@ -3,11 +3,28 @@ SuspendTemplate is a template subtype to suspend a workflow at a predetermined point in time ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **duration** | **str** | Duration is the seconds to wait before automatically resuming a template. Must be a string. Default unit is seconds. 
Could also be a Duration, e.g.: \"2m\", \"6h\" | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_suspend_template import IoArgoprojWorkflowV1alpha1SuspendTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1SuspendTemplate from a JSON string +io_argoproj_workflow_v1alpha1_suspend_template_instance = IoArgoprojWorkflowV1alpha1SuspendTemplate.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1SuspendTemplate.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_suspend_template_dict = io_argoproj_workflow_v1alpha1_suspend_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1SuspendTemplate from a dict +io_argoproj_workflow_v1alpha1_suspend_template_form_dict = io_argoproj_workflow_v1alpha1_suspend_template.from_dict(io_argoproj_workflow_v1alpha1_suspend_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Synchronization.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Synchronization.md index fc8ba3f2e9c5..2dd8dfc1b73a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Synchronization.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Synchronization.md @@ -3,12 +3,29 @@ Synchronization holds synchronization lock configuration ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **mutex** | [**IoArgoprojWorkflowV1alpha1Mutex**](IoArgoprojWorkflowV1alpha1Mutex.md) | | [optional] **semaphore** | 
[**IoArgoprojWorkflowV1alpha1SemaphoreRef**](IoArgoprojWorkflowV1alpha1SemaphoreRef.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization import IoArgoprojWorkflowV1alpha1Synchronization + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Synchronization from a JSON string +io_argoproj_workflow_v1alpha1_synchronization_instance = IoArgoprojWorkflowV1alpha1Synchronization.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Synchronization.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_synchronization_dict = io_argoproj_workflow_v1alpha1_synchronization_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Synchronization from a dict +io_argoproj_workflow_v1alpha1_synchronization_form_dict = io_argoproj_workflow_v1alpha1_synchronization.from_dict(io_argoproj_workflow_v1alpha1_synchronization_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SynchronizationStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SynchronizationStatus.md index de4e36381ac3..eb790ec599b2 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SynchronizationStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1SynchronizationStatus.md @@ -3,12 +3,29 @@ SynchronizationStatus stores the status of semaphore and mutex. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **mutex** | [**IoArgoprojWorkflowV1alpha1MutexStatus**](IoArgoprojWorkflowV1alpha1MutexStatus.md) | | [optional] **semaphore** | [**IoArgoprojWorkflowV1alpha1SemaphoreStatus**](IoArgoprojWorkflowV1alpha1SemaphoreStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_synchronization_status import IoArgoprojWorkflowV1alpha1SynchronizationStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1SynchronizationStatus from a JSON string +io_argoproj_workflow_v1alpha1_synchronization_status_instance = IoArgoprojWorkflowV1alpha1SynchronizationStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1SynchronizationStatus.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_synchronization_status_dict = io_argoproj_workflow_v1alpha1_synchronization_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1SynchronizationStatus from a dict +io_argoproj_workflow_v1alpha1_synchronization_status_form_dict = io_argoproj_workflow_v1alpha1_synchronization_status.from_dict(io_argoproj_workflow_v1alpha1_synchronization_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TTLStrategy.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TTLStrategy.md index d900c0f3f62c..8615437b14e4 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TTLStrategy.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TTLStrategy.md @@ -3,13 +3,30 @@ TTLStrategy is the strategy for the time to live depending on if the workflow succeeded or failed ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **seconds_after_completion** | **int** | SecondsAfterCompletion is the number of seconds to live after completion | [optional] **seconds_after_failure** | **int** | SecondsAfterFailure is the number of seconds to live after failure | [optional] **seconds_after_success** | **int** | SecondsAfterSuccess is the number of seconds to live after success | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_ttl_strategy import IoArgoprojWorkflowV1alpha1TTLStrategy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1TTLStrategy from a JSON string +io_argoproj_workflow_v1alpha1_ttl_strategy_instance = IoArgoprojWorkflowV1alpha1TTLStrategy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1TTLStrategy.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_ttl_strategy_dict = io_argoproj_workflow_v1alpha1_ttl_strategy_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1TTLStrategy from a dict +io_argoproj_workflow_v1alpha1_ttl_strategy_form_dict = io_argoproj_workflow_v1alpha1_ttl_strategy.from_dict(io_argoproj_workflow_v1alpha1_ttl_strategy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TarStrategy.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TarStrategy.md index 9a83909bd070..67796288bdb1 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TarStrategy.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TarStrategy.md @@ -3,11 +3,28 @@ TarStrategy will tar and gzip the file or directory when saving ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **compression_level** | **int** | CompressionLevel specifies the gzip compression level to use for the artifact. Defaults to gzip.DefaultCompression. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_tar_strategy import IoArgoprojWorkflowV1alpha1TarStrategy + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1TarStrategy from a JSON string +io_argoproj_workflow_v1alpha1_tar_strategy_instance = IoArgoprojWorkflowV1alpha1TarStrategy.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1TarStrategy.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_tar_strategy_dict = io_argoproj_workflow_v1alpha1_tar_strategy_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1TarStrategy from a dict +io_argoproj_workflow_v1alpha1_tar_strategy_form_dict = io_argoproj_workflow_v1alpha1_tar_strategy.from_dict(io_argoproj_workflow_v1alpha1_tar_strategy_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Template.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Template.md index edcf27bef471..b7c8fe149cce 100644 
--- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Template.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Template.md @@ -3,6 +3,7 @@ Template is a reusable and composable unit of execution in a workflow ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **active_deadline_seconds** | **str** | | [optional] @@ -16,18 +17,18 @@ Name | Type | Description | Notes **data** | [**IoArgoprojWorkflowV1alpha1Data**](IoArgoprojWorkflowV1alpha1Data.md) | | [optional] **executor** | [**IoArgoprojWorkflowV1alpha1ExecutorConfig**](IoArgoprojWorkflowV1alpha1ExecutorConfig.md) | | [optional] **fail_fast** | **bool** | FailFast, if specified, will fail this template if any of its child pods has failed. This is useful for when this template is expanded with `withItems`, etc. | [optional] -**host_aliases** | [**[HostAlias]**](HostAlias.md) | HostAliases is an optional list of hosts and IPs that will be injected into the pod spec | [optional] +**host_aliases** | [**List[HostAlias]**](HostAlias.md) | HostAliases is an optional list of hosts and IPs that will be injected into the pod spec | [optional] **http** | [**IoArgoprojWorkflowV1alpha1HTTP**](IoArgoprojWorkflowV1alpha1HTTP.md) | | [optional] -**init_containers** | [**[IoArgoprojWorkflowV1alpha1UserContainer]**](IoArgoprojWorkflowV1alpha1UserContainer.md) | InitContainers is a list of containers which run before the main container. | [optional] +**init_containers** | [**List[IoArgoprojWorkflowV1alpha1UserContainer]**](IoArgoprojWorkflowV1alpha1UserContainer.md) | InitContainers is a list of containers which run before the main container. 
| [optional] **inputs** | [**IoArgoprojWorkflowV1alpha1Inputs**](IoArgoprojWorkflowV1alpha1Inputs.md) | | [optional] **memoize** | [**IoArgoprojWorkflowV1alpha1Memoize**](IoArgoprojWorkflowV1alpha1Memoize.md) | | [optional] **metadata** | [**IoArgoprojWorkflowV1alpha1Metadata**](IoArgoprojWorkflowV1alpha1Metadata.md) | | [optional] **metrics** | [**IoArgoprojWorkflowV1alpha1Metrics**](IoArgoprojWorkflowV1alpha1Metrics.md) | | [optional] **name** | **str** | Name is the name of the template | [optional] -**node_selector** | **{str: (str,)}** | NodeSelector is a selector to schedule this step of the workflow to be run on the selected node(s). Overrides the selector set at the workflow level. | [optional] +**node_selector** | **Dict[str, str]** | NodeSelector is a selector to schedule this step of the workflow to be run on the selected node(s). Overrides the selector set at the workflow level. | [optional] **outputs** | [**IoArgoprojWorkflowV1alpha1Outputs**](IoArgoprojWorkflowV1alpha1Outputs.md) | | [optional] **parallelism** | **int** | Parallelism limits the max total parallel pods that can execute at the same time within the boundaries of this template invocation. If additional steps/dag templates are invoked, the pods created by those templates will not be counted towards this total. | [optional] -**plugin** | **bool, date, datetime, dict, float, int, list, str, none_type** | Plugin is an Object with exactly one key | [optional] +**plugin** | **object** | Plugin is an Object with exactly one key | [optional] **pod_spec_patch** | **str** | PodSpecPatch holds strategic merge patch to apply against the pod spec. Allows parameterization of container fields which are not strings (e.g. resource limits). | [optional] **priority** | **int** | Priority to apply to workflow pods. | [optional] **priority_class_name** | **str** | PriorityClassName to apply to workflow pods. 
| [optional] @@ -37,15 +38,31 @@ Name | Type | Description | Notes **script** | [**IoArgoprojWorkflowV1alpha1ScriptTemplate**](IoArgoprojWorkflowV1alpha1ScriptTemplate.md) | | [optional] **security_context** | [**PodSecurityContext**](PodSecurityContext.md) | | [optional] **service_account_name** | **str** | ServiceAccountName to apply to workflow pods | [optional] -**sidecars** | [**[IoArgoprojWorkflowV1alpha1UserContainer]**](IoArgoprojWorkflowV1alpha1UserContainer.md) | Sidecars is a list of containers which run alongside the main container Sidecars are automatically killed when the main container completes | [optional] -**steps** | [**[IoArgoprojWorkflowV1alpha1ParallelSteps]**](IoArgoprojWorkflowV1alpha1ParallelSteps.md) | Steps define a series of sequential/parallel workflow steps | [optional] +**sidecars** | [**List[IoArgoprojWorkflowV1alpha1UserContainer]**](IoArgoprojWorkflowV1alpha1UserContainer.md) | Sidecars is a list of containers which run alongside the main container Sidecars are automatically killed when the main container completes | [optional] +**steps** | [**List[IoArgoprojWorkflowV1alpha1ParallelSteps]**](IoArgoprojWorkflowV1alpha1ParallelSteps.md) | Steps define a series of sequential/parallel workflow steps | [optional] **suspend** | [**IoArgoprojWorkflowV1alpha1SuspendTemplate**](IoArgoprojWorkflowV1alpha1SuspendTemplate.md) | | [optional] **synchronization** | [**IoArgoprojWorkflowV1alpha1Synchronization**](IoArgoprojWorkflowV1alpha1Synchronization.md) | | [optional] **timeout** | **str** | Timeout allows to set the total node execution timeout duration counting from the node's start time. This duration also includes time in which the node spends in Pending state. This duration may not be applied to Step or DAG templates. | [optional] -**tolerations** | [**[Toleration]**](Toleration.md) | Tolerations to apply to workflow pods. 
| [optional] -**volumes** | [**[Volume]**](Volume.md) | Volumes is a list of volumes that can be mounted by containers in a template. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**tolerations** | [**List[Toleration]**](Toleration.md) | Tolerations to apply to workflow pods. | [optional] +**volumes** | [**List[Volume]**](Volume.md) | Volumes is a list of volumes that can be mounted by containers in a template. | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template import IoArgoprojWorkflowV1alpha1Template + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Template from a JSON string +io_argoproj_workflow_v1alpha1_template_instance = IoArgoprojWorkflowV1alpha1Template.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Template.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_template_dict = io_argoproj_workflow_v1alpha1_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Template from a dict +io_argoproj_workflow_v1alpha1_template_form_dict = io_argoproj_workflow_v1alpha1_template.from_dict(io_argoproj_workflow_v1alpha1_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TemplateRef.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TemplateRef.md index 7821082a8bfb..4fde4ae35992 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TemplateRef.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TemplateRef.md @@ -3,13 +3,30 @@ TemplateRef is a reference of template resource. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cluster_scope** | **bool** | ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate). | [optional] **name** | **str** | Name is the resource name of the template. | [optional] **template** | **str** | Template is the name of referred template in the resource. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_template_ref import IoArgoprojWorkflowV1alpha1TemplateRef + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1TemplateRef from a JSON string +io_argoproj_workflow_v1alpha1_template_ref_instance = IoArgoprojWorkflowV1alpha1TemplateRef.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1TemplateRef.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_template_ref_dict = io_argoproj_workflow_v1alpha1_template_ref_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1TemplateRef from a dict +io_argoproj_workflow_v1alpha1_template_ref_form_dict = io_argoproj_workflow_v1alpha1_template_ref.from_dict(io_argoproj_workflow_v1alpha1_template_ref_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TransformationStep.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TransformationStep.md index ba2f0934b2f2..6d18e5bad968 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TransformationStep.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1TransformationStep.md @@ 
-2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **expression** | **str** | Expression defines an expr expression to apply | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_transformation_step import IoArgoprojWorkflowV1alpha1TransformationStep + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1TransformationStep from a JSON string +io_argoproj_workflow_v1alpha1_transformation_step_instance = IoArgoprojWorkflowV1alpha1TransformationStep.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1TransformationStep.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_transformation_step_dict = io_argoproj_workflow_v1alpha1_transformation_step_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1TransformationStep from a dict +io_argoproj_workflow_v1alpha1_transformation_step_form_dict = io_argoproj_workflow_v1alpha1_transformation_step.from_dict(io_argoproj_workflow_v1alpha1_transformation_step_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.md index a084e05ad7db..066bbe6d6326 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | 
------------- **cron_workflow** | [**IoArgoprojWorkflowV1alpha1CronWorkflow**](IoArgoprojWorkflowV1alpha1CronWorkflow.md) | | [optional] **name** | **str** | DEPRECATED: This field is ignored. | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_update_cron_workflow_request import IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest from a JSON string +io_argoproj_workflow_v1alpha1_update_cron_workflow_request_instance = IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_update_cron_workflow_request_dict = io_argoproj_workflow_v1alpha1_update_cron_workflow_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1UpdateCronWorkflowRequest from a dict +io_argoproj_workflow_v1alpha1_update_cron_workflow_request_form_dict = io_argoproj_workflow_v1alpha1_update_cron_workflow_request.from_dict(io_argoproj_workflow_v1alpha1_update_cron_workflow_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md index 2b2147e7329b..5818c43de391 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1UserContainer.md @@ -3,19 +3,20 @@ 
UserContainer is a container specified by a user. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | -**args** | **[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**command** | **[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] -**env** | [**[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. | [optional] -**env_from** | [**[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. 
The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] +**args** | **List[str]** | Arguments to the entrypoint. The container image's CMD is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**command** | **List[str]** | Entrypoint array. Not executed within a shell. The container image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. \"$$(VAR_NAME)\" will produce the string literal \"$(VAR_NAME)\". Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell | [optional] +**env** | [**List[EnvVar]**](EnvVar.md) | List of environment variables to set in the container. Cannot be updated. 
| [optional] +**env_from** | [**List[EnvFromSource]**](EnvFromSource.md) | List of sources to populate environment variables in the container. The keys defined within a source must be a C_IDENTIFIER. All invalid keys will be reported as an event when the container is starting. When a key exists in multiple sources, the value associated with the last source will take precedence. Values defined by an Env with a duplicate key will take precedence. Cannot be updated. | [optional] **image** | **str** | Container image name. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] **image_pull_policy** | **str** | Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images | [optional] **lifecycle** | [**Lifecycle**](Lifecycle.md) | | [optional] **liveness_probe** | [**Probe**](Probe.md) | | [optional] **mirror_volume_mounts** | **bool** | MirrorVolumeMounts will mount the same volumes specified in the main container to the container (including artifacts), at the same mountPaths. This enables dind daemon to partially see the same filesystem as the main container in order to use features such as docker volume binding | [optional] -**ports** | [**[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated. 
| [optional] +**name** | **str** | Name of the container specified as a DNS_LABEL. Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated. | +**ports** | [**List[ContainerPort]**](ContainerPort.md) | List of ports to expose from the container. Not specifying a port here DOES NOT prevent that port from being exposed. Any port which is listening on the default \"0.0.0.0\" address inside a container will be accessible from the network. Modifying this array with strategic merge patch may corrupt the data. For more information See https://github.com/kubernetes/kubernetes/issues/108255. Cannot be updated. | [optional] **readiness_probe** | [**Probe**](Probe.md) | | [optional] **resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] **security_context** | [**SecurityContext**](SecurityContext.md) | | [optional] @@ -25,11 +26,27 @@ Name | Type | Description | Notes **termination_message_path** | **str** | Optional: Path at which the file to which the container's termination message will be written is mounted into the container's filesystem. Message written is intended to be brief final status, such as an assertion failure message. Will be truncated by the node if greater than 4096 bytes. The total message length across all containers will be limited to 12kb. Defaults to /dev/termination-log. Cannot be updated. | [optional] **termination_message_policy** | **str** | Indicate how the termination message should be populated. File will use the contents of terminationMessagePath to populate the container status message on both success and failure. FallbackToLogsOnError will use the last chunk of container log output if the termination message file is empty and the container exited with an error. The log output is limited to 2048 bytes or 80 lines, whichever is smaller. Defaults to File. Cannot be updated. | [optional] **tty** | **bool** | Whether this container should allocate a TTY for itself, also requires 'stdin' to be true. 
Default is false. | [optional] -**volume_devices** | [**[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. | [optional] -**volume_mounts** | [**[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] +**volume_devices** | [**List[VolumeDevice]**](VolumeDevice.md) | volumeDevices is the list of block devices to be used by the container. | [optional] +**volume_mounts** | [**List[VolumeMount]**](VolumeMount.md) | Pod volumes to mount into the container's filesystem. Cannot be updated. | [optional] **working_dir** | **str** | Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_user_container import IoArgoprojWorkflowV1alpha1UserContainer + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1UserContainer from a JSON string +io_argoproj_workflow_v1alpha1_user_container_instance = IoArgoprojWorkflowV1alpha1UserContainer.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1UserContainer.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_user_container_dict = io_argoproj_workflow_v1alpha1_user_container_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1UserContainer from a dict +io_argoproj_workflow_v1alpha1_user_container_form_dict = io_argoproj_workflow_v1alpha1_user_container.from_dict(io_argoproj_workflow_v1alpha1_user_container_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md index d603c7bddb90..e64f83135bf7 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md @@ -3,6 +3,7 @@ ValueFrom describes a location in which to obtain the value to a parameter ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map_key_ref** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] @@ -13,9 +14,25 @@ Name | Type | Description | Notes **json_path** | **str** | JSONPath of a resource to retrieve an output parameter value from in resource templates | [optional] **parameter** | **str** | Parameter reference to a step or dag task in which to retrieve an output parameter value from (e.g. '{{steps.mystep.outputs.myparam}}') | [optional] **path** | **str** | Path in the container to retrieve an output parameter value from in container templates | [optional] -**supplied** | **bool, date, datetime, dict, float, int, list, str, none_type** | SuppliedValueFrom is a placeholder for a value to be filled in directly, either through the CLI, API, etc. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**supplied** | **object** | SuppliedValueFrom is a placeholder for a value to be filled in directly, either through the CLI, API, etc. 
| [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_value_from import IoArgoprojWorkflowV1alpha1ValueFrom + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1ValueFrom from a JSON string +io_argoproj_workflow_v1alpha1_value_from_instance = IoArgoprojWorkflowV1alpha1ValueFrom.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1ValueFrom.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_value_from_dict = io_argoproj_workflow_v1alpha1_value_from_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1ValueFrom from a dict +io_argoproj_workflow_v1alpha1_value_from_form_dict = io_argoproj_workflow_v1alpha1_value_from.from_dict(io_argoproj_workflow_v1alpha1_value_from_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Version.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Version.md index 8e7da940ad4f..05de8ed601a8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Version.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Version.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **build_date** | **str** | | @@ -12,8 +13,24 @@ Name | Type | Description | Notes **go_version** | **str** | | **platform** | **str** | | **version** | **str** | | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_version import IoArgoprojWorkflowV1alpha1Version + +# TODO update the JSON string below +json = "{}" +# create 
an instance of IoArgoprojWorkflowV1alpha1Version from a JSON string +io_argoproj_workflow_v1alpha1_version_instance = IoArgoprojWorkflowV1alpha1Version.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Version.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_version_dict = io_argoproj_workflow_v1alpha1_version_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Version from a dict +io_argoproj_workflow_v1alpha1_version_form_dict = io_argoproj_workflow_v1alpha1_version.from_dict(io_argoproj_workflow_v1alpha1_version_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1VolumeClaimGC.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1VolumeClaimGC.md index 54d84309f6fb..7136c6e0df24 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1VolumeClaimGC.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1VolumeClaimGC.md @@ -3,11 +3,28 @@ VolumeClaimGC describes how to delete volumes from completed Workflows ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **strategy** | **str** | Strategy is the strategy to use. One of \"OnWorkflowCompletion\", \"OnWorkflowSuccess\". 
Defaults to \"OnWorkflowSuccess\" | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_volume_claim_gc import IoArgoprojWorkflowV1alpha1VolumeClaimGC + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1VolumeClaimGC from a JSON string +io_argoproj_workflow_v1alpha1_volume_claim_gc_instance = IoArgoprojWorkflowV1alpha1VolumeClaimGC.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1VolumeClaimGC.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_volume_claim_gc_dict = io_argoproj_workflow_v1alpha1_volume_claim_gc_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1VolumeClaimGC from a dict +io_argoproj_workflow_v1alpha1_volume_claim_gc_form_dict = io_argoproj_workflow_v1alpha1_volume_claim_gc.from_dict(io_argoproj_workflow_v1alpha1_volume_claim_gc_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Workflow.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Workflow.md index c7d3ef617979..0c72806386e0 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Workflow.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Workflow.md @@ -3,15 +3,32 @@ Workflow is the definition of a workflow resource ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | -**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | **api_version** | **str** | APIVersion defines the 
versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] +**metadata** | [**ObjectMeta**](ObjectMeta.md) | | +**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | **status** | [**IoArgoprojWorkflowV1alpha1WorkflowStatus**](IoArgoprojWorkflowV1alpha1WorkflowStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1Workflow from a JSON string +io_argoproj_workflow_v1alpha1_workflow_instance = IoArgoprojWorkflowV1alpha1Workflow.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1Workflow.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_dict = io_argoproj_workflow_v1alpha1_workflow_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1Workflow from a dict +io_argoproj_workflow_v1alpha1_workflow_form_dict = io_argoproj_workflow_v1alpha1_workflow.from_dict(io_argoproj_workflow_v1alpha1_workflow_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.md index adac729fdb94..46a8c1800981 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] @@ -9,8 +10,24 @@ Name | Type | Description | Notes **namespace** | **str** | | [optional] **server_dry_run** | **bool** | | [optional] **workflow** | [**IoArgoprojWorkflowV1alpha1Workflow**](IoArgoprojWorkflowV1alpha1Workflow.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowCreateRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_create_request_instance = IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_create_request_dict = io_argoproj_workflow_v1alpha1_workflow_create_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowCreateRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_create_request_form_dict = 
io_argoproj_workflow_v1alpha1_workflow_create_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_create_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBinding.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBinding.md index e882cbe870b0..63a81d8e3096 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBinding.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBinding.md @@ -3,14 +3,31 @@ WorkflowEventBinding is the definition of an event resource ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | -**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec**](IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ObjectMeta**](ObjectMeta.md) | | +**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec**](IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding import IoArgoprojWorkflowV1alpha1WorkflowEventBinding + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBinding from a JSON string +io_argoproj_workflow_v1alpha1_workflow_event_binding_instance = IoArgoprojWorkflowV1alpha1WorkflowEventBinding.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowEventBinding.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_event_binding_dict = io_argoproj_workflow_v1alpha1_workflow_event_binding_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBinding from a dict +io_argoproj_workflow_v1alpha1_workflow_event_binding_form_dict = io_argoproj_workflow_v1alpha1_workflow_event_binding.from_dict(io_argoproj_workflow_v1alpha1_workflow_event_binding_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingList.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingList.md index 218314a4bba6..212d17038381 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingList.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingList.md @@ -3,14 +3,31 @@ WorkflowEventBindingList is list of event resources ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojWorkflowV1alpha1WorkflowEventBinding]**](IoArgoprojWorkflowV1alpha1WorkflowEventBinding.md) | | -**metadata** | [**ListMeta**](ListMeta.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] +**items** | [**List[IoArgoprojWorkflowV1alpha1WorkflowEventBinding]**](IoArgoprojWorkflowV1alpha1WorkflowEventBinding.md) | | **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ListMeta**](ListMeta.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_list import IoArgoprojWorkflowV1alpha1WorkflowEventBindingList + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingList from a JSON string +io_argoproj_workflow_v1alpha1_workflow_event_binding_list_instance = IoArgoprojWorkflowV1alpha1WorkflowEventBindingList.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowEventBindingList.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_event_binding_list_dict = io_argoproj_workflow_v1alpha1_workflow_event_binding_list_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingList from a dict +io_argoproj_workflow_v1alpha1_workflow_event_binding_list_form_dict = io_argoproj_workflow_v1alpha1_workflow_event_binding_list.from_dict(io_argoproj_workflow_v1alpha1_workflow_event_binding_list_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.md index 58be5808d02c..a375569146d0 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes 
------------ | ------------- | ------------- | ------------- **event** | [**IoArgoprojWorkflowV1alpha1Event**](IoArgoprojWorkflowV1alpha1Event.md) | | **submit** | [**IoArgoprojWorkflowV1alpha1Submit**](IoArgoprojWorkflowV1alpha1Submit.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_event_binding_spec import IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec from a JSON string +io_argoproj_workflow_v1alpha1_workflow_event_binding_spec_instance = IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_event_binding_spec_dict = io_argoproj_workflow_v1alpha1_workflow_event_binding_spec_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowEventBindingSpec from a dict +io_argoproj_workflow_v1alpha1_workflow_event_binding_spec_form_dict = io_argoproj_workflow_v1alpha1_workflow_event_binding_spec.from_dict(io_argoproj_workflow_v1alpha1_workflow_event_binding_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC.md index 748122b26c1d..e81bb3cf8c83 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC.md @@ -3,6 +3,7 @@ WorkflowLevelArtifactGC describes how to delete artifacts from completed Workflows - this spec is used on the Workflow level ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **force_finalizer_removal** | **bool** | ForceFinalizerRemoval: if set to true, the finalizer will be removed in the case that Artifact GC fails | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **pod_spec_patch** | **str** | PodSpecPatch holds strategic merge patch to apply against the artgc pod spec. | [optional] **service_account_name** | **str** | ServiceAccountName is an optional field for specifying the Service Account that should be assigned to the Pod doing the deletion | [optional] **strategy** | **str** | Strategy is the strategy to use. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc import IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC from a JSON string +io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc_instance = IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc_dict = io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC from a dict +io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc_form_dict = 
io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc.from_dict(io_argoproj_workflow_v1alpha1_workflow_level_artifact_gc_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLintRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLintRequest.md index f696863e709f..04a49686b009 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLintRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowLintRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **namespace** | **str** | | [optional] **workflow** | [**IoArgoprojWorkflowV1alpha1Workflow**](IoArgoprojWorkflowV1alpha1Workflow.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_lint_request import IoArgoprojWorkflowV1alpha1WorkflowLintRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowLintRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_lint_request_instance = IoArgoprojWorkflowV1alpha1WorkflowLintRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowLintRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_lint_request_dict = io_argoproj_workflow_v1alpha1_workflow_lint_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowLintRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_lint_request_form_dict = 
io_argoproj_workflow_v1alpha1_workflow_lint_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_lint_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowList.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowList.md index 5ab075f5de3f..d445ce692064 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowList.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowList.md @@ -3,14 +3,31 @@ WorkflowList is list of Workflow resources ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojWorkflowV1alpha1Workflow]**](IoArgoprojWorkflowV1alpha1Workflow.md) | | -**metadata** | [**ListMeta**](ListMeta.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] +**items** | [**List[IoArgoprojWorkflowV1alpha1Workflow]**](IoArgoprojWorkflowV1alpha1Workflow.md) | | **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ListMeta**](ListMeta.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowList from a JSON string +io_argoproj_workflow_v1alpha1_workflow_list_instance = IoArgoprojWorkflowV1alpha1WorkflowList.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowList.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_list_dict = io_argoproj_workflow_v1alpha1_workflow_list_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowList from a dict +io_argoproj_workflow_v1alpha1_workflow_list_form_dict = io_argoproj_workflow_v1alpha1_workflow_list.from_dict(io_argoproj_workflow_v1alpha1_workflow_list_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowMetadata.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowMetadata.md index 589a1e6c3ff7..411ac3e164ee 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowMetadata.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowMetadata.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**annotations** | **{str: (str,)}** | | [optional] -**labels** | **{str: (str,)}** | | [optional] -**labels_from** | [**{str: 
(IoArgoprojWorkflowV1alpha1LabelValueFrom,)}**](IoArgoprojWorkflowV1alpha1LabelValueFrom.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**annotations** | **Dict[str, str]** | | [optional] +**labels** | **Dict[str, str]** | | [optional] +**labels_from** | [**Dict[str, IoArgoprojWorkflowV1alpha1LabelValueFrom]**](IoArgoprojWorkflowV1alpha1LabelValueFrom.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_metadata import IoArgoprojWorkflowV1alpha1WorkflowMetadata + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowMetadata from a JSON string +io_argoproj_workflow_v1alpha1_workflow_metadata_instance = IoArgoprojWorkflowV1alpha1WorkflowMetadata.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowMetadata.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_metadata_dict = io_argoproj_workflow_v1alpha1_workflow_metadata_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowMetadata from a dict +io_argoproj_workflow_v1alpha1_workflow_metadata_form_dict = io_argoproj_workflow_v1alpha1_workflow_metadata.from_dict(io_argoproj_workflow_v1alpha1_workflow_metadata_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md index fc1293c91ae8..11f0856d538a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md @@ 
-2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **memoized** | **bool** | | [optional] **name** | **str** | | [optional] **namespace** | **str** | | [optional] -**parameters** | **[str]** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**parameters** | **List[str]** | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_resubmit_request_instance = IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_resubmit_request_dict = io_argoproj_workflow_v1alpha1_workflow_resubmit_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_resubmit_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_resubmit_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_resubmit_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.md index f3de147980ff..b0e89473d285 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] **node_field_selector** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowResumeRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_resume_request_instance = IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_resume_request_dict = io_argoproj_workflow_v1alpha1_workflow_resume_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowResumeRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_resume_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_resume_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_resume_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md index 11069d64d1b8..d21d388fbea3 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md @@ -2,15 +2,32 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] **node_field_selector** | **str** | | [optional] -**parameters** | **[str]** | | [optional] +**parameters** | **List[str]** | | [optional] **restart_successful** | **bool** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowRetryRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_retry_request_instance = IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_retry_request_dict = io_argoproj_workflow_v1alpha1_workflow_retry_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowRetryRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_retry_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_retry_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_retry_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSetRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSetRequest.md index 50ee7155117b..ffcdb633c454 100644 --- 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSetRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSetRequest.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **message** | **str** | | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **node_field_selector** | **str** | | [optional] **output_parameters** | **str** | | [optional] **phase** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSetRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_set_request_instance = IoArgoprojWorkflowV1alpha1WorkflowSetRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowSetRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_set_request_dict = io_argoproj_workflow_v1alpha1_workflow_set_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSetRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_set_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_set_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_set_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md index 3f9ded2612a9..8c56e624f3cb 100644 --- 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSpec.md @@ -3,6 +3,7 @@ WorkflowSpec is the specification of a Workflow. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **active_deadline_seconds** | **int** | Optional duration in seconds relative to the workflow start time which the workflow is allowed to run before the controller terminates the io.argoproj.workflow.v1alpha1. A value of zero is used to terminate a Running workflow | [optional] @@ -16,12 +17,12 @@ Name | Type | Description | Notes **dns_policy** | **str** | Set DNS policy for workflow pods. Defaults to \"ClusterFirst\". Valid values are 'ClusterFirstWithHostNet', 'ClusterFirst', 'Default' or 'None'. DNS parameters given in DNSConfig will be merged with the policy selected with DNSPolicy. To have DNS options set along with hostNetwork, you have to specify DNS policy explicitly to 'ClusterFirstWithHostNet'. | [optional] **entrypoint** | **str** | Entrypoint is a template reference to the starting point of the io.argoproj.workflow.v1alpha1. 
| [optional] **executor** | [**IoArgoprojWorkflowV1alpha1ExecutorConfig**](IoArgoprojWorkflowV1alpha1ExecutorConfig.md) | | [optional] -**hooks** | [**{str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}**](IoArgoprojWorkflowV1alpha1LifecycleHook.md) | Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step | [optional] -**host_aliases** | [**[HostAlias]**](HostAlias.md) | | [optional] +**hooks** | [**Dict[str, IoArgoprojWorkflowV1alpha1LifecycleHook]**](IoArgoprojWorkflowV1alpha1LifecycleHook.md) | Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step | [optional] +**host_aliases** | [**List[HostAlias]**](HostAlias.md) | | [optional] **host_network** | **bool** | Host networking requested for this workflow pod. Default to false. | [optional] -**image_pull_secrets** | [**[LocalObjectReference]**](LocalObjectReference.md) | ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images in pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets can be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet. More info: https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod | [optional] +**image_pull_secrets** | [**List[LocalObjectReference]**](LocalObjectReference.md) | ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images in pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets can be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet. 
More info: https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod | [optional] **metrics** | [**IoArgoprojWorkflowV1alpha1Metrics**](IoArgoprojWorkflowV1alpha1Metrics.md) | | [optional] -**node_selector** | **{str: (str,)}** | NodeSelector is a selector which will result in all pods of the workflow to be scheduled on the selected node(s). This is able to be overridden by a nodeSelector specified in the template. | [optional] +**node_selector** | **Dict[str, str]** | NodeSelector is a selector which will result in all pods of the workflow to be scheduled on the selected node(s). This is able to be overridden by a nodeSelector specified in the template. | [optional] **on_exit** | **str** | OnExit is a template reference which is invoked at the end of the workflow, irrespective of the success, failure, or error of the primary io.argoproj.workflow.v1alpha1. | [optional] **parallelism** | **int** | Parallelism limits the max total parallel pods that can execute at the same time in a workflow | [optional] **pod_disruption_budget** | [**IoK8sApiPolicyV1PodDisruptionBudgetSpec**](IoK8sApiPolicyV1PodDisruptionBudgetSpec.md) | | [optional] @@ -39,16 +40,32 @@ Name | Type | Description | Notes **suspend** | **bool** | Suspend will suspend the workflow and prevent execution of any future steps in the workflow | [optional] **synchronization** | [**IoArgoprojWorkflowV1alpha1Synchronization**](IoArgoprojWorkflowV1alpha1Synchronization.md) | | [optional] **template_defaults** | [**IoArgoprojWorkflowV1alpha1Template**](IoArgoprojWorkflowV1alpha1Template.md) | | [optional] -**templates** | [**[IoArgoprojWorkflowV1alpha1Template]**](IoArgoprojWorkflowV1alpha1Template.md) | Templates is a list of workflow templates used in a workflow | [optional] -**tolerations** | [**[Toleration]**](Toleration.md) | Tolerations to apply to workflow pods. 
| [optional] +**templates** | [**List[IoArgoprojWorkflowV1alpha1Template]**](IoArgoprojWorkflowV1alpha1Template.md) | Templates is a list of workflow templates used in a workflow | [optional] +**tolerations** | [**List[Toleration]**](Toleration.md) | Tolerations to apply to workflow pods. | [optional] **ttl_strategy** | [**IoArgoprojWorkflowV1alpha1TTLStrategy**](IoArgoprojWorkflowV1alpha1TTLStrategy.md) | | [optional] **volume_claim_gc** | [**IoArgoprojWorkflowV1alpha1VolumeClaimGC**](IoArgoprojWorkflowV1alpha1VolumeClaimGC.md) | | [optional] -**volume_claim_templates** | [**[PersistentVolumeClaim]**](PersistentVolumeClaim.md) | VolumeClaimTemplates is a list of claims that containers are allowed to reference. The Workflow controller will create the claims at the beginning of the workflow and delete the claims upon completion of the workflow | [optional] -**volumes** | [**[Volume]**](Volume.md) | Volumes is a list of volumes that can be mounted by containers in a io.argoproj.workflow.v1alpha1. | [optional] +**volume_claim_templates** | [**List[PersistentVolumeClaim]**](PersistentVolumeClaim.md) | VolumeClaimTemplates is a list of claims that containers are allowed to reference. The Workflow controller will create the claims at the beginning of the workflow and delete the claims upon completion of the workflow | [optional] +**volumes** | [**List[Volume]**](Volume.md) | Volumes is a list of volumes that can be mounted by containers in a io.argoproj.workflow.v1alpha1. 
| [optional] **workflow_metadata** | [**IoArgoprojWorkflowV1alpha1WorkflowMetadata**](IoArgoprojWorkflowV1alpha1WorkflowMetadata.md) | | [optional] **workflow_template_ref** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateRef**](IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSpec from a JSON string +io_argoproj_workflow_v1alpha1_workflow_spec_instance = IoArgoprojWorkflowV1alpha1WorkflowSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowSpec.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_spec_dict = io_argoproj_workflow_v1alpha1_workflow_spec_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSpec from a dict +io_argoproj_workflow_v1alpha1_workflow_spec_form_dict = io_argoproj_workflow_v1alpha1_workflow_spec.from_dict(io_argoproj_workflow_v1alpha1_workflow_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md index c46412b8d3c8..3fb319d3d1a3 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md @@ -3,29 +3,46 @@ WorkflowStatus contains overall status information about a workflow ## Properties + Name | Type | Description | Notes 
------------ | ------------- | ------------- | ------------- **artifact_gc_status** | [**IoArgoprojWorkflowV1alpha1ArtGCStatus**](IoArgoprojWorkflowV1alpha1ArtGCStatus.md) | | [optional] **artifact_repository_ref** | [**IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus**](IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus.md) | | [optional] **compressed_nodes** | **str** | Compressed and base64 decoded Nodes map | [optional] -**conditions** | [**[IoArgoprojWorkflowV1alpha1Condition]**](IoArgoprojWorkflowV1alpha1Condition.md) | Conditions is a list of conditions the Workflow may have | [optional] +**conditions** | [**List[IoArgoprojWorkflowV1alpha1Condition]**](IoArgoprojWorkflowV1alpha1Condition.md) | Conditions is a list of conditions the Workflow may have | [optional] **estimated_duration** | **int** | EstimatedDuration in seconds. | [optional] **finished_at** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **message** | **str** | A human readable message indicating details about why the workflow is in this condition. | [optional] -**nodes** | [**{str: (IoArgoprojWorkflowV1alpha1NodeStatus,)}**](IoArgoprojWorkflowV1alpha1NodeStatus.md) | Nodes is a mapping between a node ID and the node's status. | [optional] +**nodes** | [**Dict[str, IoArgoprojWorkflowV1alpha1NodeStatus]**](IoArgoprojWorkflowV1alpha1NodeStatus.md) | Nodes is a mapping between a node ID and the node's status. | [optional] **offload_node_status_version** | **str** | Whether on not node status has been offloaded to a database. If exists, then Nodes and CompressedNodes will be empty. This will actually be populated with a hash of the offloaded data. 
| [optional] **outputs** | [**IoArgoprojWorkflowV1alpha1Outputs**](IoArgoprojWorkflowV1alpha1Outputs.md) | | [optional] -**persistent_volume_claims** | [**[Volume]**](Volume.md) | PersistentVolumeClaims tracks all PVCs that were created as part of the io.argoproj.workflow.v1alpha1. The contents of this list are drained at the end of the workflow. | [optional] +**persistent_volume_claims** | [**List[Volume]**](Volume.md) | PersistentVolumeClaims tracks all PVCs that were created as part of the io.argoproj.workflow.v1alpha1. The contents of this list are drained at the end of the workflow. | [optional] **phase** | **str** | Phase a simple, high-level summary of where the workflow is in its lifecycle. Will be \"\" (Unknown), \"Pending\", or \"Running\" before the workflow is completed, and \"Succeeded\", \"Failed\" or \"Error\" once the workflow has completed. | [optional] **progress** | **str** | Progress to completion | [optional] -**resources_duration** | **{str: (int,)}** | ResourcesDuration is the total for the workflow | [optional] +**resources_duration** | **Dict[str, int]** | ResourcesDuration is the total for the workflow | [optional] **started_at** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] -**stored_templates** | [**{str: (IoArgoprojWorkflowV1alpha1Template,)}**](IoArgoprojWorkflowV1alpha1Template.md) | StoredTemplates is a mapping between a template ref and the node's status. | [optional] +**stored_templates** | [**Dict[str, IoArgoprojWorkflowV1alpha1Template]**](IoArgoprojWorkflowV1alpha1Template.md) | StoredTemplates is a mapping between a template ref and the node's status. 
| [optional] **stored_workflow_template_spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | [optional] **synchronization** | [**IoArgoprojWorkflowV1alpha1SynchronizationStatus**](IoArgoprojWorkflowV1alpha1SynchronizationStatus.md) | | [optional] -**task_results_completion_status** | **{str: (bool,)}** | TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**task_results_completion_status** | **Dict[str, bool]** | TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection. | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_status import IoArgoprojWorkflowV1alpha1WorkflowStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowStatus from a JSON string +io_argoproj_workflow_v1alpha1_workflow_status_instance = IoArgoprojWorkflowV1alpha1WorkflowStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_status_dict = io_argoproj_workflow_v1alpha1_workflow_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowStatus from a dict +io_argoproj_workflow_v1alpha1_workflow_status_form_dict = io_argoproj_workflow_v1alpha1_workflow_status.from_dict(io_argoproj_workflow_v1alpha1_workflow_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStep.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStep.md index 0e3d04c0cda3..2704fe9f8f50 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStep.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStep.md @@ -3,22 +3,39 @@ WorkflowStep is a reference to a template to execute in a series of step ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **arguments** | [**IoArgoprojWorkflowV1alpha1Arguments**](IoArgoprojWorkflowV1alpha1Arguments.md) | | [optional] **continue_on** | [**IoArgoprojWorkflowV1alpha1ContinueOn**](IoArgoprojWorkflowV1alpha1ContinueOn.md) | | [optional] -**hooks** | [**{str: (IoArgoprojWorkflowV1alpha1LifecycleHook,)}**](IoArgoprojWorkflowV1alpha1LifecycleHook.md) | Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step | [optional] +**hooks** | [**Dict[str, IoArgoprojWorkflowV1alpha1LifecycleHook]**](IoArgoprojWorkflowV1alpha1LifecycleHook.md) | Hooks holds the lifecycle hook which is invoked at lifecycle of step, irrespective of the success, failure, or error status of the primary step | [optional] **inline** | [**IoArgoprojWorkflowV1alpha1Template**](IoArgoprojWorkflowV1alpha1Template.md) | | [optional] **name** | **str** | Name of the step | [optional] **on_exit** | **str** | OnExit is a template reference which is invoked at the end of the template, irrespective of the success, failure, or error of the primary template. DEPRECATED: Use Hooks[exit].Template instead. 
| [optional] **template** | **str** | Template is the name of the template to execute as the step | [optional] **template_ref** | [**IoArgoprojWorkflowV1alpha1TemplateRef**](IoArgoprojWorkflowV1alpha1TemplateRef.md) | | [optional] **when** | **str** | When is an expression in which the step should conditionally execute | [optional] -**with_items** | **[dict]** | WithItems expands a step into multiple parallel steps from the items in the list | [optional] +**with_items** | **List[object]** | WithItems expands a step into multiple parallel steps from the items in the list | [optional] **with_param** | **str** | WithParam expands a step into multiple parallel steps from the value in the parameter, which is expected to be a JSON list. | [optional] **with_sequence** | [**IoArgoprojWorkflowV1alpha1Sequence**](IoArgoprojWorkflowV1alpha1Sequence.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_step import IoArgoprojWorkflowV1alpha1WorkflowStep + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowStep from a JSON string +io_argoproj_workflow_v1alpha1_workflow_step_instance = IoArgoprojWorkflowV1alpha1WorkflowStep.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowStep.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_step_dict = io_argoproj_workflow_v1alpha1_workflow_step_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowStep from a dict +io_argoproj_workflow_v1alpha1_workflow_step_form_dict = io_argoproj_workflow_v1alpha1_workflow_step.from_dict(io_argoproj_workflow_v1alpha1_workflow_step_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) 
[[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStopRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStopRequest.md index eab3b6d245c5..90caf43a05d9 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStopRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStopRequest.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **message** | **str** | | [optional] **name** | **str** | | [optional] **namespace** | **str** | | [optional] **node_field_selector** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_stop_request import IoArgoprojWorkflowV1alpha1WorkflowStopRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowStopRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_stop_request_instance = IoArgoprojWorkflowV1alpha1WorkflowStopRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowStopRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_stop_request_dict = io_argoproj_workflow_v1alpha1_workflow_stop_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowStopRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_stop_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_stop_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_stop_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.md index 99e3d5bf4e93..f14f5c9e69f5 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.md @@ -2,14 +2,31 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **namespace** | **str** | | [optional] **resource_kind** | **str** | | [optional] **resource_name** | **str** | | [optional] **submit_options** | [**IoArgoprojWorkflowV1alpha1SubmitOpts**](IoArgoprojWorkflowV1alpha1SubmitOpts.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_submit_request_instance = IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_submit_request_dict = io_argoproj_workflow_v1alpha1_workflow_submit_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_submit_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_submit_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_submit_request_dict) 
+``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.md index c0a69b25b6bb..c7295b6fca5c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_suspend_request_instance = IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_suspend_request_dict = io_argoproj_workflow_v1alpha1_workflow_suspend_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_suspend_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_suspend_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_suspend_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back 
to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec.md index f60e79575ea9..0e86206c6deb 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**tasks** | [**{str: (IoArgoprojWorkflowV1alpha1Template,)}**](IoArgoprojWorkflowV1alpha1Template.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**tasks** | [**Dict[str, IoArgoprojWorkflowV1alpha1Template]**](IoArgoprojWorkflowV1alpha1Template.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_task_set_spec import IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec from a JSON string +io_argoproj_workflow_v1alpha1_workflow_task_set_spec_instance = IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_task_set_spec_dict = io_argoproj_workflow_v1alpha1_workflow_task_set_spec_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetSpec from a dict +io_argoproj_workflow_v1alpha1_workflow_task_set_spec_form_dict = io_argoproj_workflow_v1alpha1_workflow_task_set_spec.from_dict(io_argoproj_workflow_v1alpha1_workflow_task_set_spec_dict) +``` [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus.md index 733dd32854c4..6495b033243c 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus.md @@ -2,11 +2,28 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**nodes** | [**{str: (IoArgoprojWorkflowV1alpha1NodeResult,)}**](IoArgoprojWorkflowV1alpha1NodeResult.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**nodes** | [**Dict[str, IoArgoprojWorkflowV1alpha1NodeResult]**](IoArgoprojWorkflowV1alpha1NodeResult.md) | | [optional] + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_task_set_status import IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus from a JSON string +io_argoproj_workflow_v1alpha1_workflow_task_set_status_instance = IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_task_set_status_dict = io_argoproj_workflow_v1alpha1_workflow_task_set_status_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTaskSetStatus from a dict +io_argoproj_workflow_v1alpha1_workflow_task_set_status_form_dict = 
io_argoproj_workflow_v1alpha1_workflow_task_set_status.from_dict(io_argoproj_workflow_v1alpha1_workflow_task_set_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplate.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplate.md index 932ccdf2006f..75f56c8594b8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplate.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplate.md @@ -3,14 +3,31 @@ WorkflowTemplate is the definition of a workflow template resource ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**metadata** | [**ObjectMeta**](ObjectMeta.md) | | -**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ObjectMeta**](ObjectMeta.md) | | +**spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplate from a JSON string +io_argoproj_workflow_v1alpha1_workflow_template_instance = IoArgoprojWorkflowV1alpha1WorkflowTemplate.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTemplate.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_template_dict = io_argoproj_workflow_v1alpha1_workflow_template_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplate from a dict +io_argoproj_workflow_v1alpha1_workflow_template_form_dict = io_argoproj_workflow_v1alpha1_workflow_template.from_dict(io_argoproj_workflow_v1alpha1_workflow_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.md index c2952d52d91f..07bc194d505f 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | 
Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] **namespace** | **str** | | [optional] **template** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplate**](IoArgoprojWorkflowV1alpha1WorkflowTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_template_create_request_instance = IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_template_create_request_dict = io_argoproj_workflow_v1alpha1_workflow_template_create_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_template_create_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_template_create_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_template_create_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.md index 881e9b34904d..47b816dd0020 100644 --- 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] **namespace** | **str** | | [optional] **template** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplate**](IoArgoprojWorkflowV1alpha1WorkflowTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_template_lint_request_instance = IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_template_lint_request_dict = io_argoproj_workflow_v1alpha1_workflow_template_lint_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_template_lint_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_template_lint_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_template_lint_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateList.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateList.md index babd902ccfe0..941ba4786df8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateList.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateList.md @@ -3,14 +3,31 @@ WorkflowTemplateList is list of WorkflowTemplate resources ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojWorkflowV1alpha1WorkflowTemplate]**](IoArgoprojWorkflowV1alpha1WorkflowTemplate.md) | | -**metadata** | [**ListMeta**](ListMeta.md) | | **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] +**items** | [**List[IoArgoprojWorkflowV1alpha1WorkflowTemplate]**](IoArgoprojWorkflowV1alpha1WorkflowTemplate.md) | | **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**metadata** | [**ListMeta**](ListMeta.md) | | + +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_list import IoArgoprojWorkflowV1alpha1WorkflowTemplateList + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateList from a JSON string +io_argoproj_workflow_v1alpha1_workflow_template_list_instance = IoArgoprojWorkflowV1alpha1WorkflowTemplateList.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTemplateList.to_json()) +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_template_list_dict = io_argoproj_workflow_v1alpha1_workflow_template_list_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateList from a dict +io_argoproj_workflow_v1alpha1_workflow_template_list_form_dict = io_argoproj_workflow_v1alpha1_workflow_template_list.from_dict(io_argoproj_workflow_v1alpha1_workflow_template_list_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.md index fd4d223f53d2..39c3ce3124d2 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.md @@ -3,12 +3,29 @@ WorkflowTemplateRef is a reference to a WorkflowTemplate resource. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cluster_scope** | **bool** | ClusterScope indicates the referred template is cluster scoped (i.e. a ClusterWorkflowTemplate). | [optional] **name** | **str** | Name is the resource name of the workflow template. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_ref import IoArgoprojWorkflowV1alpha1WorkflowTemplateRef + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateRef from a JSON string +io_argoproj_workflow_v1alpha1_workflow_template_ref_instance = IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTemplateRef.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_template_ref_dict = io_argoproj_workflow_v1alpha1_workflow_template_ref_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateRef from a dict +io_argoproj_workflow_v1alpha1_workflow_template_ref_form_dict = io_argoproj_workflow_v1alpha1_workflow_template_ref.from_dict(io_argoproj_workflow_v1alpha1_workflow_template_ref_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.md index 2ba336548549..ccf041f20fec 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.md +++ 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | DEPRECATED: This field is ignored. | [optional] **namespace** | **str** | | [optional] **template** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplate**](IoArgoprojWorkflowV1alpha1WorkflowTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_template_update_request_instance = IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_template_update_request_dict = io_argoproj_workflow_v1alpha1_workflow_template_update_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_template_update_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_template_update_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_template_update_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.md index 4b921d325fc5..7c9bcb67f14a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest from a JSON string +io_argoproj_workflow_v1alpha1_workflow_terminate_request_instance = IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_terminate_request_dict = io_argoproj_workflow_v1alpha1_workflow_terminate_request_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest from a dict +io_argoproj_workflow_v1alpha1_workflow_terminate_request_form_dict = io_argoproj_workflow_v1alpha1_workflow_terminate_request.from_dict(io_argoproj_workflow_v1alpha1_workflow_terminate_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md 
b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md index 004b3ad18287..4d698609e91a 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **object** | [**IoArgoprojWorkflowV1alpha1Workflow**](IoArgoprojWorkflowV1alpha1Workflow.md) | | [optional] **type** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_watch_event import IoArgoprojWorkflowV1alpha1WorkflowWatchEvent + +# TODO update the JSON string below +json = "{}" +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a JSON string +io_argoproj_workflow_v1alpha1_workflow_watch_event_instance = IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.from_json(json) +# print the JSON string representation of the object +print(IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.to_json()) + +# convert the object into a dict +io_argoproj_workflow_v1alpha1_workflow_watch_event_dict = io_argoproj_workflow_v1alpha1_workflow_watch_event_instance.to_dict() +# create an instance of IoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a dict +io_argoproj_workflow_v1alpha1_workflow_watch_event_form_dict = io_argoproj_workflow_v1alpha1_workflow_watch_event.from_dict(io_argoproj_workflow_v1alpha1_workflow_watch_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoK8sApiPolicyV1PodDisruptionBudgetSpec.md b/sdks/python/client/docs/IoK8sApiPolicyV1PodDisruptionBudgetSpec.md index 
07dbace72e08..c2f7534abb38 100644 --- a/sdks/python/client/docs/IoK8sApiPolicyV1PodDisruptionBudgetSpec.md +++ b/sdks/python/client/docs/IoK8sApiPolicyV1PodDisruptionBudgetSpec.md @@ -3,13 +3,30 @@ PodDisruptionBudgetSpec is a description of a PodDisruptionBudget. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **max_unavailable** | **str** | | [optional] **min_available** | **str** | | [optional] **selector** | [**LabelSelector**](LabelSelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.io_k8s_api_policy_v1_pod_disruption_budget_spec import IoK8sApiPolicyV1PodDisruptionBudgetSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of IoK8sApiPolicyV1PodDisruptionBudgetSpec from a JSON string +io_k8s_api_policy_v1_pod_disruption_budget_spec_instance = IoK8sApiPolicyV1PodDisruptionBudgetSpec.from_json(json) +# print the JSON string representation of the object +print(IoK8sApiPolicyV1PodDisruptionBudgetSpec.to_json()) + +# convert the object into a dict +io_k8s_api_policy_v1_pod_disruption_budget_spec_dict = io_k8s_api_policy_v1_pod_disruption_budget_spec_instance.to_dict() +# create an instance of IoK8sApiPolicyV1PodDisruptionBudgetSpec from a dict +io_k8s_api_policy_v1_pod_disruption_budget_spec_form_dict = io_k8s_api_policy_v1_pod_disruption_budget_spec.from_dict(io_k8s_api_policy_v1_pod_disruption_budget_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/KeyToPath.md b/sdks/python/client/docs/KeyToPath.md index 1e57cc00ab57..d2b5b346689e 100644 --- a/sdks/python/client/docs/KeyToPath.md +++ 
b/sdks/python/client/docs/KeyToPath.md @@ -3,13 +3,30 @@ Maps a string key to a path within a volume. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | The key to project. | -**path** | **str** | The relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'. | **mode** | **int** | Optional: mode bits used to set permissions on this file. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**path** | **str** | The relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'. 
| + +## Example + +```python +from argo_workflows.models.key_to_path import KeyToPath + +# TODO update the JSON string below +json = "{}" +# create an instance of KeyToPath from a JSON string +key_to_path_instance = KeyToPath.from_json(json) +# print the JSON string representation of the object +print(KeyToPath.to_json()) +# convert the object into a dict +key_to_path_dict = key_to_path_instance.to_dict() +# create an instance of KeyToPath from a dict +key_to_path_form_dict = key_to_path.from_dict(key_to_path_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/LabelSelector.md b/sdks/python/client/docs/LabelSelector.md index 6a06235728eb..2cafbdd2ca42 100644 --- a/sdks/python/client/docs/LabelSelector.md +++ b/sdks/python/client/docs/LabelSelector.md @@ -3,12 +3,29 @@ A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**match_expressions** | [**[LabelSelectorRequirement]**](LabelSelectorRequirement.md) | matchExpressions is a list of label selector requirements. The requirements are ANDed. | [optional] -**match_labels** | **{str: (str,)}** | matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**match_expressions** | [**List[LabelSelectorRequirement]**](LabelSelectorRequirement.md) | matchExpressions is a list of label selector requirements. The requirements are ANDed. | [optional] +**match_labels** | **Dict[str, str]** | matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed. | [optional] + +## Example + +```python +from argo_workflows.models.label_selector import LabelSelector + +# TODO update the JSON string below +json = "{}" +# create an instance of LabelSelector from a JSON string +label_selector_instance = LabelSelector.from_json(json) +# print the JSON string representation of the object +print(LabelSelector.to_json()) +# convert the object into a dict +label_selector_dict = label_selector_instance.to_dict() +# create an instance of LabelSelector from a dict +label_selector_form_dict = label_selector.from_dict(label_selector_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/LabelSelectorRequirement.md b/sdks/python/client/docs/LabelSelectorRequirement.md index 8f0891dd8925..b649cdd94b0b 100644 --- a/sdks/python/client/docs/LabelSelectorRequirement.md +++ b/sdks/python/client/docs/LabelSelectorRequirement.md @@ -3,13 +3,30 @@ A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | key is the label key that the selector applies to. | **operator** | **str** | operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. | -**values** | **[str]** | values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**values** | **List[str]** | values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. 
| [optional] + +## Example + +```python +from argo_workflows.models.label_selector_requirement import LabelSelectorRequirement + +# TODO update the JSON string below +json = "{}" +# create an instance of LabelSelectorRequirement from a JSON string +label_selector_requirement_instance = LabelSelectorRequirement.from_json(json) +# print the JSON string representation of the object +print(LabelSelectorRequirement.to_json()) +# convert the object into a dict +label_selector_requirement_dict = label_selector_requirement_instance.to_dict() +# create an instance of LabelSelectorRequirement from a dict +label_selector_requirement_form_dict = label_selector_requirement.from_dict(label_selector_requirement_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Lifecycle.md b/sdks/python/client/docs/Lifecycle.md index 906eeb53f22b..8f4eae7b96b8 100644 --- a/sdks/python/client/docs/Lifecycle.md +++ b/sdks/python/client/docs/Lifecycle.md @@ -3,12 +3,29 @@ Lifecycle describes actions that the management system should take in response to container lifecycle events. For the PostStart and PreStop lifecycle handlers, management of the container blocks until the action is complete, unless the container process fails, in which case the handler is aborted. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **post_start** | [**LifecycleHandler**](LifecycleHandler.md) | | [optional] **pre_stop** | [**LifecycleHandler**](LifecycleHandler.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.lifecycle import Lifecycle + +# TODO update the JSON string below +json = "{}" +# create an instance of Lifecycle from a JSON string +lifecycle_instance = Lifecycle.from_json(json) +# print the JSON string representation of the object +print(Lifecycle.to_json()) + +# convert the object into a dict +lifecycle_dict = lifecycle_instance.to_dict() +# create an instance of Lifecycle from a dict +lifecycle_form_dict = lifecycle.from_dict(lifecycle_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/LifecycleHandler.md b/sdks/python/client/docs/LifecycleHandler.md index daf29a37d0cb..578c834347d4 100644 --- a/sdks/python/client/docs/LifecycleHandler.md +++ b/sdks/python/client/docs/LifecycleHandler.md @@ -3,13 +3,30 @@ LifecycleHandler defines a specific action that should be taken in a lifecycle hook. One and only one of the fields, except TCPSocket must be specified. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**_exec** | [**ExecAction**](ExecAction.md) | | [optional] +**var_exec** | [**ExecAction**](ExecAction.md) | | [optional] **http_get** | [**HTTPGetAction**](HTTPGetAction.md) | | [optional] **tcp_socket** | [**TCPSocketAction**](TCPSocketAction.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.lifecycle_handler import LifecycleHandler + +# TODO update the JSON string below +json = "{}" +# create an instance of LifecycleHandler from a JSON string +lifecycle_handler_instance = LifecycleHandler.from_json(json) +# print the JSON string representation of the object +print(LifecycleHandler.to_json()) + +# convert the object into a dict +lifecycle_handler_dict = lifecycle_handler_instance.to_dict() +# create an instance of LifecycleHandler from a dict +lifecycle_handler_form_dict = lifecycle_handler.from_dict(lifecycle_handler_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ListMeta.md b/sdks/python/client/docs/ListMeta.md index 6c7357a98e72..16324d48b072 100644 --- a/sdks/python/client/docs/ListMeta.md +++ b/sdks/python/client/docs/ListMeta.md @@ -3,14 +3,31 @@ ListMeta describes metadata that synthetic resources must have, including lists and various status objects. A resource may have only one of {ObjectMeta, ListMeta}. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**_continue** | **str** | continue may be set if the user set a limit on the number of items returned, and indicates that the server has more data available. 
The value is opaque and may be used to issue another request to the endpoint that served this list to retrieve the next set of available objects. Continuing a consistent list may not be possible if the server configuration has changed or more than a few minutes have passed. The resourceVersion field returned when using this continue value will be identical to the value in the first response, unless you have received this token from an error message. | [optional] +**var_continue** | **str** | continue may be set if the user set a limit on the number of items returned, and indicates that the server has more data available. The value is opaque and may be used to issue another request to the endpoint that served this list to retrieve the next set of available objects. Continuing a consistent list may not be possible if the server configuration has changed or more than a few minutes have passed. The resourceVersion field returned when using this continue value will be identical to the value in the first response, unless you have received this token from an error message. | [optional] **remaining_item_count** | **int** | remainingItemCount is the number of subsequent items in the list which are not included in this list response. If the list request contained label or field selectors, then the number of remaining items is unknown and the field will be left unset and omitted during serialization. If the list is complete (either because it is not chunking or because this is the last chunk), then there are no more remaining items and this field will be left unset and omitted during serialization. Servers older than v1.15 do not set this field. The intended use of the remainingItemCount is *estimating* the size of a collection. Clients should not rely on the remainingItemCount to be set or to be exact. 
| [optional] **resource_version** | **str** | String that identifies the server's internal version of this object that can be used by clients to determine when objects have changed. Value must be treated as opaque by clients and passed unmodified back to the server. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency | [optional] **self_link** | **str** | selfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.list_meta import ListMeta + +# TODO update the JSON string below +json = "{}" +# create an instance of ListMeta from a JSON string +list_meta_instance = ListMeta.from_json(json) +# print the JSON string representation of the object +print(ListMeta.to_json()) + +# convert the object into a dict +list_meta_dict = list_meta_instance.to_dict() +# create an instance of ListMeta from a dict +list_meta_form_dict = list_meta.from_dict(list_meta_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/LocalObjectReference.md b/sdks/python/client/docs/LocalObjectReference.md index 4f8ae7b13df5..9db80764ebc5 100644 --- a/sdks/python/client/docs/LocalObjectReference.md +++ b/sdks/python/client/docs/LocalObjectReference.md @@ -3,11 +3,28 @@ LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.local_object_reference import LocalObjectReference + +# TODO update the JSON string below +json = "{}" +# create an instance of LocalObjectReference from a JSON string +local_object_reference_instance = LocalObjectReference.from_json(json) +# print the JSON string representation of the object +print(LocalObjectReference.to_json()) + +# convert the object into a dict +local_object_reference_dict = local_object_reference_instance.to_dict() +# create an instance of LocalObjectReference from a dict +local_object_reference_form_dict = local_object_reference.from_dict(local_object_reference_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ManagedFieldsEntry.md b/sdks/python/client/docs/ManagedFieldsEntry.md index e7b994ae12c5..0a415d179b5f 100644 --- a/sdks/python/client/docs/ManagedFieldsEntry.md +++ b/sdks/python/client/docs/ManagedFieldsEntry.md @@ -3,17 +3,34 @@ ManagedFieldsEntry is a workflow-id, a FieldSet and the group version of the resource that the fieldset applies to. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **api_version** | **str** | APIVersion defines the version of this resource that this field set applies to. The format is \"group/version\" just like the top-level APIVersion field. 
It is necessary to track the version of a field set because it cannot be automatically converted. | [optional] **fields_type** | **str** | FieldsType is the discriminator for the different fields format and version. There is currently only one possible value: \"FieldsV1\" | [optional] -**fields_v1** | **bool, date, datetime, dict, float, int, list, str, none_type** | FieldsV1 stores a set of fields in a data structure like a Trie, in JSON format. Each key is either a '.' representing the field itself, and will always map to an empty set, or a string representing a sub-field or item. The string will follow one of these four formats: 'f:<name>', where <name> is the name of a field in a struct, or key in a map 'v:<value>', where <value> is the exact json formatted value of a list item 'i:<index>', where <index> is position of a item in a list 'k:<keys>', where <keys> is a map of a list item's key fields to their unique values If a key maps to an empty Fields value, the field that key represents is part of the set. The exact format is defined in sigs.k8s.io/structured-merge-diff | [optional] +**fields_v1** | **object** | FieldsV1 stores a set of fields in a data structure like a Trie, in JSON format. Each key is either a '.' representing the field itself, and will always map to an empty set, or a string representing a sub-field or item. The string will follow one of these four formats: 'f:<name>', where <name> is the name of a field in a struct, or key in a map 'v:<value>', where <value> is the exact json formatted value of a list item 'i:<index>', where <index> is position of a item in a list 'k:<keys>', where <keys> is a map of a list item's key fields to their unique values If a key maps to an empty Fields value, the field that key represents is part of the set. The exact format is defined in sigs.k8s.io/structured-merge-diff | [optional] **manager** | **str** | Manager is an identifier of the workflow managing these fields. 
| [optional] **operation** | **str** | Operation is the type of operation which lead to this ManagedFieldsEntry being created. The only valid values for this field are 'Apply' and 'Update'. | [optional] **subresource** | **str** | Subresource is the name of the subresource used to update that object, or empty string if the object was updated through the main resource. The value of this field is used to distinguish between managers, even if they share the same name. For example, a status update will be distinct from a regular update using the same manager name. Note that the APIVersion field is not related to the Subresource field and it always corresponds to the version of the main resource. | [optional] **time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.managed_fields_entry import ManagedFieldsEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of ManagedFieldsEntry from a JSON string +managed_fields_entry_instance = ManagedFieldsEntry.from_json(json) +# print the JSON string representation of the object +print(ManagedFieldsEntry.to_json()) + +# convert the object into a dict +managed_fields_entry_dict = managed_fields_entry_instance.to_dict() +# create an instance of ManagedFieldsEntry from a dict +managed_fields_entry_form_dict = managed_fields_entry.from_dict(managed_fields_entry_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/NFSVolumeSource.md b/sdks/python/client/docs/NFSVolumeSource.md index 
a514712bef47..60fb4c30163a 100644 --- a/sdks/python/client/docs/NFSVolumeSource.md +++ b/sdks/python/client/docs/NFSVolumeSource.md @@ -3,13 +3,30 @@ Represents an NFS mount that lasts the lifetime of a pod. NFS volumes do not support ownership management or SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **path** | **str** | Path that is exported by the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs | -**server** | **str** | Server is the hostname or IP address of the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs | **read_only** | **bool** | ReadOnly here will force the NFS export to be mounted with read-only permissions. Defaults to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**server** | **str** | Server is the hostname or IP address of the NFS server. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs | + +## Example + +```python +from argo_workflows.models.nfs_volume_source import NFSVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of NFSVolumeSource from a JSON string +nfs_volume_source_instance = NFSVolumeSource.from_json(json) +# print the JSON string representation of the object +print(NFSVolumeSource.to_json()) +# convert the object into a dict +nfs_volume_source_dict = nfs_volume_source_instance.to_dict() +# create an instance of NFSVolumeSource from a dict +nfs_volume_source_form_dict = nfs_volume_source.from_dict(nfs_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/NodeAffinity.md b/sdks/python/client/docs/NodeAffinity.md index 1cc7d3d58bb4..a302798d9813 100644 --- a/sdks/python/client/docs/NodeAffinity.md +++ b/sdks/python/client/docs/NodeAffinity.md @@ -3,12 +3,29 @@ Node affinity is a group of node affinity scheduling rules. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**preferred_during_scheduling_ignored_during_execution** | [**[PreferredSchedulingTerm]**](PreferredSchedulingTerm.md) | The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred. 
| [optional] +**preferred_during_scheduling_ignored_during_execution** | [**List[PreferredSchedulingTerm]**](PreferredSchedulingTerm.md) | The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred. | [optional] **required_during_scheduling_ignored_during_execution** | [**NodeSelector**](NodeSelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.node_affinity import NodeAffinity + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeAffinity from a JSON string +node_affinity_instance = NodeAffinity.from_json(json) +# print the JSON string representation of the object +print(NodeAffinity.to_json()) + +# convert the object into a dict +node_affinity_dict = node_affinity_instance.to_dict() +# create an instance of NodeAffinity from a dict +node_affinity_form_dict = node_affinity.from_dict(node_affinity_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/NodeSelector.md b/sdks/python/client/docs/NodeSelector.md index 87f648ef6660..5652778d9bc7 100644 --- a/sdks/python/client/docs/NodeSelector.md +++ b/sdks/python/client/docs/NodeSelector.md @@ -3,11 +3,28 @@ 
A node selector represents the union of the results of one or more label queries over a set of nodes; that is, it represents the OR of the selectors represented by the node selector terms. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**node_selector_terms** | [**[NodeSelectorTerm]**](NodeSelectorTerm.md) | Required. A list of node selector terms. The terms are ORed. | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**node_selector_terms** | [**List[NodeSelectorTerm]**](NodeSelectorTerm.md) | Required. A list of node selector terms. The terms are ORed. | + +## Example + +```python +from argo_workflows.models.node_selector import NodeSelector + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeSelector from a JSON string +node_selector_instance = NodeSelector.from_json(json) +# print the JSON string representation of the object +print(NodeSelector.to_json()) +# convert the object into a dict +node_selector_dict = node_selector_instance.to_dict() +# create an instance of NodeSelector from a dict +node_selector_form_dict = node_selector.from_dict(node_selector_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/NodeSelectorRequirement.md b/sdks/python/client/docs/NodeSelectorRequirement.md index 7cc72f18cb0a..1ca9616cd08a 100644 --- a/sdks/python/client/docs/NodeSelectorRequirement.md +++ b/sdks/python/client/docs/NodeSelectorRequirement.md @@ -3,13 +3,30 @@ A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | The label key that the selector applies to. | **operator** | **str** | Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. Possible enum values: - `\"DoesNotExist\"` - `\"Exists\"` - `\"Gt\"` - `\"In\"` - `\"Lt\"` - `\"NotIn\"` | -**values** | **[str]** | An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**values** | **List[str]** | An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. 
| [optional] + +## Example + +```python +from argo_workflows.models.node_selector_requirement import NodeSelectorRequirement + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeSelectorRequirement from a JSON string +node_selector_requirement_instance = NodeSelectorRequirement.from_json(json) +# print the JSON string representation of the object +print(NodeSelectorRequirement.to_json()) +# convert the object into a dict +node_selector_requirement_dict = node_selector_requirement_instance.to_dict() +# create an instance of NodeSelectorRequirement from a dict +node_selector_requirement_form_dict = node_selector_requirement.from_dict(node_selector_requirement_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/NodeSelectorTerm.md b/sdks/python/client/docs/NodeSelectorTerm.md index 194919426451..6607bec4cf72 100644 --- a/sdks/python/client/docs/NodeSelectorTerm.md +++ b/sdks/python/client/docs/NodeSelectorTerm.md @@ -3,12 +3,29 @@ A null or empty node selector term matches no objects. The requirements of them are ANDed. The TopologySelectorTerm type implements a subset of the NodeSelectorTerm. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**match_expressions** | [**[NodeSelectorRequirement]**](NodeSelectorRequirement.md) | A list of node selector requirements by node's labels. | [optional] -**match_fields** | [**[NodeSelectorRequirement]**](NodeSelectorRequirement.md) | A list of node selector requirements by node's fields. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**match_expressions** | [**List[NodeSelectorRequirement]**](NodeSelectorRequirement.md) | A list of node selector requirements by node's labels. | [optional] +**match_fields** | [**List[NodeSelectorRequirement]**](NodeSelectorRequirement.md) | A list of node selector requirements by node's fields. | [optional] + +## Example + +```python +from argo_workflows.models.node_selector_term import NodeSelectorTerm + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeSelectorTerm from a JSON string +node_selector_term_instance = NodeSelectorTerm.from_json(json) +# print the JSON string representation of the object +print(NodeSelectorTerm.to_json()) +# convert the object into a dict +node_selector_term_dict = node_selector_term_instance.to_dict() +# create an instance of NodeSelectorTerm from a dict +node_selector_term_form_dict = node_selector_term.from_dict(node_selector_term_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ObjectFieldSelector.md b/sdks/python/client/docs/ObjectFieldSelector.md index 537ddfd4c823..9541a8a6596e 100644 --- a/sdks/python/client/docs/ObjectFieldSelector.md +++ b/sdks/python/client/docs/ObjectFieldSelector.md @@ -3,12 +3,29 @@ ObjectFieldSelector selects an APIVersioned field of an object. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**field_path** | **str** | Path of the field to select in the specified API version. | **api_version** | **str** | Version of the schema the FieldPath is written in terms of, defaults to \"v1\". 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**field_path** | **str** | Path of the field to select in the specified API version. | + +## Example + +```python +from argo_workflows.models.object_field_selector import ObjectFieldSelector + +# TODO update the JSON string below +json = "{}" +# create an instance of ObjectFieldSelector from a JSON string +object_field_selector_instance = ObjectFieldSelector.from_json(json) +# print the JSON string representation of the object +print(ObjectFieldSelector.to_json()) +# convert the object into a dict +object_field_selector_dict = object_field_selector_instance.to_dict() +# create an instance of ObjectFieldSelector from a dict +object_field_selector_form_dict = object_field_selector.from_dict(object_field_selector_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ObjectMeta.md b/sdks/python/client/docs/ObjectMeta.md index bd09367bbde5..af1a58ecaba4 100644 --- a/sdks/python/client/docs/ObjectMeta.md +++ b/sdks/python/client/docs/ObjectMeta.md @@ -3,26 +3,43 @@ ObjectMeta is metadata that all persisted resources must have, which includes all objects users must create. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**annotations** | **{str: (str,)}** | Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. 
More info: http://kubernetes.io/docs/user-guide/annotations | [optional] +**annotations** | **Dict[str, str]** | Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations | [optional] **cluster_name** | **str** | The name of the cluster which the object belongs to. This is used to distinguish resources with same name and namespace in different clusters. This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request. | [optional] **creation_timestamp** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **deletion_grace_period_seconds** | **int** | Number of seconds allowed for this object to gracefully terminate before it will be removed from the system. Only set when deletionTimestamp is also set. May only be shortened. Read-only. | [optional] **deletion_timestamp** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] -**finalizers** | **[str]** | Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed. Finalizers may be processed and removed in any order. Order is NOT enforced because it introduces significant risk of stuck finalizers. finalizers is a shared field, any actor with permission can reorder it. 
If the finalizer list is processed in order, then this can lead to a situation in which the component responsible for the first finalizer in the list is waiting for a signal (field value, external system, or other) produced by a component responsible for a finalizer later in the list, resulting in a deadlock. Without enforced ordering finalizers are free to order amongst themselves and are not vulnerable to ordering changes in the list. | [optional] +**finalizers** | **List[str]** | Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed. Finalizers may be processed and removed in any order. Order is NOT enforced because it introduces significant risk of stuck finalizers. finalizers is a shared field, any actor with permission can reorder it. If the finalizer list is processed in order, then this can lead to a situation in which the component responsible for the first finalizer in the list is waiting for a signal (field value, external system, or other) produced by a component responsible for a finalizer later in the list, resulting in a deadlock. Without enforced ordering finalizers are free to order amongst themselves and are not vulnerable to ordering changes in the list. | [optional] **generate_name** | **str** | GenerateName is an optional prefix, used by the server, to generate a unique name ONLY IF the Name field has not been provided. If this field is used, the name returned to the client will be different than the name passed. This value will also be combined with a unique suffix. The provided value has the same validation rules as the Name field, and may be truncated by the length of the suffix required to make the value unique on the server. 
If this field is specified and the generated name exists, the server will NOT return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout indicating a unique name could not be found in the time allotted, and the client should retry (optionally after the time indicated in the Retry-After header). Applied only if Name is not specified. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#idempotency | [optional] **generation** | **int** | A sequence number representing a specific generation of the desired state. Populated by the system. Read-only. | [optional] -**labels** | **{str: (str,)}** | Map of string keys and values that can be used to organize and categorize (scope and select) objects. May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels | [optional] -**managed_fields** | [**[ManagedFieldsEntry]**](ManagedFieldsEntry.md) | ManagedFields maps workflow-id and version to the set of fields that are managed by that workflow. This is mostly for internal housekeeping, and users typically shouldn't need to set or understand this field. A workflow can be the user's name, a controller's name, or the name of a specific apply path like \"ci-cd\". The set of fields is always in the version that the workflow used when modifying the object. | [optional] +**labels** | **Dict[str, str]** | Map of string keys and values that can be used to organize and categorize (scope and select) objects. May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels | [optional] +**managed_fields** | [**List[ManagedFieldsEntry]**](ManagedFieldsEntry.md) | ManagedFields maps workflow-id and version to the set of fields that are managed by that workflow. This is mostly for internal housekeeping, and users typically shouldn't need to set or understand this field. 
A workflow can be the user's name, a controller's name, or the name of a specific apply path like \"ci-cd\". The set of fields is always in the version that the workflow used when modifying the object. | [optional] **name** | **str** | Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names | [optional] **namespace** | **str** | Namespace defines the space within which each name must be unique. An empty namespace is equivalent to the \"default\" namespace, but \"default\" is the canonical representation. Not all objects are required to be scoped to a namespace - the value of this field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/namespaces | [optional] -**owner_references** | [**[OwnerReference]**](OwnerReference.md) | List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. There cannot be more than one managing controller. | [optional] +**owner_references** | [**List[OwnerReference]**](OwnerReference.md) | List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. There cannot be more than one managing controller. 
| [optional] **resource_version** | **str** | An opaque value that represents the internal version of this object that can be used by clients to determine when objects have changed. May be used for optimistic concurrency, change detection, and the watch operation on a resource or set of resources. Clients must treat these values as opaque and passed unmodified back to the server. They may only be valid for a particular resource or set of resources. Populated by the system. Read-only. Value must be treated as opaque by clients and . More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency | [optional] **self_link** | **str** | SelfLink is a URL representing this object. Populated by the system. Read-only. DEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release. | [optional] **uid** | **str** | UID is the unique in time and space value for this object. It is typically generated by the server on successful creation of a resource and is not allowed to change on PUT operations. Populated by the system. Read-only. 
More info: http://kubernetes.io/docs/user-guide/identifiers#uids | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.object_meta import ObjectMeta + +# TODO update the JSON string below +json = "{}" +# create an instance of ObjectMeta from a JSON string +object_meta_instance = ObjectMeta.from_json(json) +# print the JSON string representation of the object +print(ObjectMeta.to_json()) + +# convert the object into a dict +object_meta_dict = object_meta_instance.to_dict() +# create an instance of ObjectMeta from a dict +object_meta_form_dict = object_meta.from_dict(object_meta_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ObjectReference.md b/sdks/python/client/docs/ObjectReference.md index 4e32f30faeb1..ca3aea3dce74 100644 --- a/sdks/python/client/docs/ObjectReference.md +++ b/sdks/python/client/docs/ObjectReference.md @@ -3,6 +3,7 @@ ObjectReference contains enough information to let you inspect or modify the referred object. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **api_version** | **str** | API version of the referent. | [optional] @@ -12,8 +13,24 @@ Name | Type | Description | Notes **namespace** | **str** | Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/ | [optional] **resource_version** | **str** | Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency | [optional] **uid** | **str** | UID of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.object_reference import ObjectReference + +# TODO update the JSON string below +json = "{}" +# create an instance of ObjectReference from a JSON string +object_reference_instance = ObjectReference.from_json(json) +# print the JSON string representation of the object +print(ObjectReference.to_json()) + +# convert the object into a dict +object_reference_dict = object_reference_instance.to_dict() +# create an instance of ObjectReference from a dict +object_reference_form_dict = object_reference.from_dict(object_reference_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/OwnerReference.md b/sdks/python/client/docs/OwnerReference.md index aadfce620221..cc713373400b 100644 --- a/sdks/python/client/docs/OwnerReference.md +++ b/sdks/python/client/docs/OwnerReference.md @@ -3,16 +3,33 @@ OwnerReference contains enough information to let you identify an owning object. An owning object must be in the same namespace as the dependent, or be cluster-scoped, so there is no namespace field. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **api_version** | **str** | API version of the referent. | +**block_owner_deletion** | **bool** | If true, AND if the owner has the \"foregroundDeletion\" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. Defaults to false. To set this field, a user needs \"delete\" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned. 
| [optional] +**controller** | **bool** | If true, this reference points to the managing controller. | [optional] **kind** | **str** | Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds | **name** | **str** | Name of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#names | **uid** | **str** | UID of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#uids | -**block_owner_deletion** | **bool** | If true, AND if the owner has the \"foregroundDeletion\" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. Defaults to false. To set this field, a user needs \"delete\" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned. | [optional] -**controller** | **bool** | If true, this reference points to the managing controller. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.owner_reference import OwnerReference + +# TODO update the JSON string below +json = "{}" +# create an instance of OwnerReference from a JSON string +owner_reference_instance = OwnerReference.from_json(json) +# print the JSON string representation of the object +print(OwnerReference.to_json()) + +# convert the object into a dict +owner_reference_dict = owner_reference_instance.to_dict() +# create an instance of OwnerReference from a dict +owner_reference_form_dict = owner_reference.from_dict(owner_reference_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PersistentVolumeClaim.md b/sdks/python/client/docs/PersistentVolumeClaim.md index 968b596c72a5..12ac52a90917 
100644 --- a/sdks/python/client/docs/PersistentVolumeClaim.md +++ b/sdks/python/client/docs/PersistentVolumeClaim.md @@ -3,6 +3,7 @@ PersistentVolumeClaim is a user's request for and claim to a persistent volume ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] **spec** | [**PersistentVolumeClaimSpec**](PersistentVolumeClaimSpec.md) | | [optional] **status** | [**PersistentVolumeClaimStatus**](PersistentVolumeClaimStatus.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.persistent_volume_claim import PersistentVolumeClaim + +# TODO update the JSON string below +json = "{}" +# create an instance of PersistentVolumeClaim from a JSON string +persistent_volume_claim_instance = PersistentVolumeClaim.from_json(json) +# print the JSON string representation of the object +print(PersistentVolumeClaim.to_json()) + +# convert the object into a dict +persistent_volume_claim_dict = persistent_volume_claim_instance.to_dict() +# create an instance of PersistentVolumeClaim from a dict +persistent_volume_claim_form_dict = persistent_volume_claim.from_dict(persistent_volume_claim_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/PersistentVolumeClaimCondition.md b/sdks/python/client/docs/PersistentVolumeClaimCondition.md index d84cc228076f..57cafe112eec 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimCondition.md +++ b/sdks/python/client/docs/PersistentVolumeClaimCondition.md @@ -3,16 +3,33 @@ PersistentVolumeClaimCondition contails details about state of pvc ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**status** | **str** | | -**type** | **str** | Possible enum values: - `\"FileSystemResizePending\"` - controller resize is finished and a file system resize is pending on node - `\"Resizing\"` - a user trigger resize of pvc has been started | **last_probe_time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **last_transition_time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **message** | **str** | Human-readable message indicating details about last transition. | [optional] **reason** | **str** | Unique, this should be a short, machine understandable string that gives the reason for condition's last transition. If it reports \"ResizeStarted\" that means the underlying persistent volume is being resized. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**status** | **str** | | +**type** | **str** | Possible enum values: - `\"FileSystemResizePending\"` - controller resize is finished and a file system resize is pending on node - `\"Resizing\"` - a user trigger resize of pvc has been started | + +## Example + +```python +from argo_workflows.models.persistent_volume_claim_condition import PersistentVolumeClaimCondition + +# TODO update the JSON string below +json = "{}" +# create an instance of PersistentVolumeClaimCondition from a JSON string +persistent_volume_claim_condition_instance = PersistentVolumeClaimCondition.from_json(json) +# print the JSON string representation of the object +print(PersistentVolumeClaimCondition.to_json()) +# convert the object into a dict +persistent_volume_claim_condition_dict = persistent_volume_claim_condition_instance.to_dict() +# create an instance of PersistentVolumeClaimCondition from a dict +persistent_volume_claim_condition_form_dict = persistent_volume_claim_condition.from_dict(persistent_volume_claim_condition_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PersistentVolumeClaimSpec.md b/sdks/python/client/docs/PersistentVolumeClaimSpec.md index 0ca7bb137164..9ceae294aebd 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimSpec.md +++ b/sdks/python/client/docs/PersistentVolumeClaimSpec.md @@ -3,9 +3,10 @@ PersistentVolumeClaimSpec describes the common attributes of storage devices and allows a Source for provider-specific attributes ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**access_modes** | **[str]** | AccessModes contains the desired access modes the volume 
should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 | [optional] +**access_modes** | **List[str]** | AccessModes contains the desired access modes the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 | [optional] **data_source** | [**TypedLocalObjectReference**](TypedLocalObjectReference.md) | | [optional] **data_source_ref** | [**TypedLocalObjectReference**](TypedLocalObjectReference.md) | | [optional] **resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] @@ -13,8 +14,24 @@ Name | Type | Description | Notes **storage_class_name** | **str** | Name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 | [optional] **volume_mode** | **str** | volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec. | [optional] **volume_name** | **str** | VolumeName is the binding reference to the PersistentVolume backing this claim. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.persistent_volume_claim_spec import PersistentVolumeClaimSpec + +# TODO update the JSON string below +json = "{}" +# create an instance of PersistentVolumeClaimSpec from a JSON string +persistent_volume_claim_spec_instance = PersistentVolumeClaimSpec.from_json(json) +# print the JSON string representation of the object +print(PersistentVolumeClaimSpec.to_json()) + +# convert the object into a dict +persistent_volume_claim_spec_dict = persistent_volume_claim_spec_instance.to_dict() +# create an instance of PersistentVolumeClaimSpec from a dict +persistent_volume_claim_spec_form_dict = persistent_volume_claim_spec.from_dict(persistent_volume_claim_spec_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PersistentVolumeClaimStatus.md b/sdks/python/client/docs/PersistentVolumeClaimStatus.md index 97d0a474e36d..eec7dce00337 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimStatus.md +++ b/sdks/python/client/docs/PersistentVolumeClaimStatus.md @@ -3,16 +3,33 @@ PersistentVolumeClaimStatus is the current status of a persistent volume claim. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**access_modes** | **[str]** | AccessModes contains the actual access modes the volume backing the PVC has. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 | [optional] -**allocated_resources** | **{str: (str,)}** | The storage resource within AllocatedResources tracks the capacity allocated to a PVC. It may be larger than the actual capacity when a volume expansion operation is requested. 
For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature. | [optional] -**capacity** | **{str: (str,)}** | Represents the actual resources of the underlying volume. | [optional] -**conditions** | [**[PersistentVolumeClaimCondition]**](PersistentVolumeClaimCondition.md) | Current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'ResizeStarted'. | [optional] +**access_modes** | **List[str]** | AccessModes contains the actual access modes the volume backing the PVC has. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 | [optional] +**allocated_resources** | **Dict[str, str]** | The storage resource within AllocatedResources tracks the capacity allocated to a PVC. It may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature. | [optional] +**capacity** | **Dict[str, str]** | Represents the actual resources of the underlying volume. 
| [optional] +**conditions** | [**List[PersistentVolumeClaimCondition]**](PersistentVolumeClaimCondition.md) | Current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'ResizeStarted'. | [optional] **phase** | **str** | Phase represents the current phase of PersistentVolumeClaim. Possible enum values: - `\"Bound\"` used for PersistentVolumeClaims that are bound - `\"Lost\"` used for PersistentVolumeClaims that lost their underlying PersistentVolume. The claim was bound to a PersistentVolume and this volume does not exist any longer and all data on it was lost. - `\"Pending\"` used for PersistentVolumeClaims that are not yet bound | [optional] **resize_status** | **str** | ResizeStatus stores status of resize operation. ResizeStatus is not set by default but when expansion is complete resizeStatus is set to empty string by resize controller or kubelet. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.persistent_volume_claim_status import PersistentVolumeClaimStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of PersistentVolumeClaimStatus from a JSON string +persistent_volume_claim_status_instance = PersistentVolumeClaimStatus.from_json(json) +# print the JSON string representation of the object +print(PersistentVolumeClaimStatus.to_json()) + +# convert the object into a dict +persistent_volume_claim_status_dict = persistent_volume_claim_status_instance.to_dict() +# create an instance of PersistentVolumeClaimStatus from a dict +persistent_volume_claim_status_form_dict = persistent_volume_claim_status.from_dict(persistent_volume_claim_status_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PersistentVolumeClaimTemplate.md b/sdks/python/client/docs/PersistentVolumeClaimTemplate.md index eabdfb70b898..478146cfb915 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimTemplate.md +++ b/sdks/python/client/docs/PersistentVolumeClaimTemplate.md @@ -3,12 +3,29 @@ PersistentVolumeClaimTemplate is used to produce PersistentVolumeClaim objects as part of an EphemeralVolumeSource. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**spec** | [**PersistentVolumeClaimSpec**](PersistentVolumeClaimSpec.md) | | **metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**spec** | [**PersistentVolumeClaimSpec**](PersistentVolumeClaimSpec.md) | | + +## Example + +```python +from argo_workflows.models.persistent_volume_claim_template import PersistentVolumeClaimTemplate + +# TODO update the JSON string below +json = "{}" +# create an instance of PersistentVolumeClaimTemplate from a JSON string +persistent_volume_claim_template_instance = PersistentVolumeClaimTemplate.from_json(json) +# print the JSON string representation of the object +print(PersistentVolumeClaimTemplate.to_json()) +# convert the object into a dict +persistent_volume_claim_template_dict = persistent_volume_claim_template_instance.to_dict() +# create an instance of PersistentVolumeClaimTemplate from a dict +persistent_volume_claim_template_form_dict = persistent_volume_claim_template.from_dict(persistent_volume_claim_template_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PersistentVolumeClaimVolumeSource.md b/sdks/python/client/docs/PersistentVolumeClaimVolumeSource.md index 566ced71f884..061e803034f4 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimVolumeSource.md +++ b/sdks/python/client/docs/PersistentVolumeClaimVolumeSource.md @@ -3,12 +3,29 @@ PersistentVolumeClaimVolumeSource references the user's PVC in the same namespace. This volume finds the bound PV and mounts that volume for the pod. 
A PersistentVolumeClaimVolumeSource is, essentially, a wrapper around another type of volume that is owned by someone else (the system). ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **claim_name** | **str** | ClaimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims | **read_only** | **bool** | Will force the ReadOnly setting in VolumeMounts. Default false. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of PersistentVolumeClaimVolumeSource from a JSON string +persistent_volume_claim_volume_source_instance = PersistentVolumeClaimVolumeSource.from_json(json) +# print the JSON string representation of the object +print(PersistentVolumeClaimVolumeSource.to_json()) + +# convert the object into a dict +persistent_volume_claim_volume_source_dict = persistent_volume_claim_volume_source_instance.to_dict() +# create an instance of PersistentVolumeClaimVolumeSource from a dict +persistent_volume_claim_volume_source_form_dict = persistent_volume_claim_volume_source.from_dict(persistent_volume_claim_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PhotonPersistentDiskVolumeSource.md b/sdks/python/client/docs/PhotonPersistentDiskVolumeSource.md index 4456bd8e1941..e210adb46f2e 100644 --- a/sdks/python/client/docs/PhotonPersistentDiskVolumeSource.md +++ 
b/sdks/python/client/docs/PhotonPersistentDiskVolumeSource.md @@ -3,12 +3,29 @@ Represents a Photon Controller persistent disk resource. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**pd_id** | **str** | ID that identifies Photon Controller persistent disk | **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**pd_id** | **str** | ID that identifies Photon Controller persistent disk | + +## Example + +```python +from argo_workflows.models.photon_persistent_disk_volume_source import PhotonPersistentDiskVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of PhotonPersistentDiskVolumeSource from a JSON string +photon_persistent_disk_volume_source_instance = PhotonPersistentDiskVolumeSource.from_json(json) +# print the JSON string representation of the object +print(PhotonPersistentDiskVolumeSource.to_json()) +# convert the object into a dict +photon_persistent_disk_volume_source_dict = photon_persistent_disk_volume_source_instance.to_dict() +# create an instance of PhotonPersistentDiskVolumeSource from a dict +photon_persistent_disk_volume_source_form_dict = photon_persistent_disk_volume_source.from_dict(photon_persistent_disk_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PodAffinity.md b/sdks/python/client/docs/PodAffinity.md index 339ed045a872..80c34c893c5a 100644 --- a/sdks/python/client/docs/PodAffinity.md +++ b/sdks/python/client/docs/PodAffinity.md @@ -3,12 
+3,29 @@ Pod affinity is a group of inter pod affinity scheduling rules. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**preferred_during_scheduling_ignored_during_execution** | [**[WeightedPodAffinityTerm]**](WeightedPodAffinityTerm.md) | The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. | [optional] -**required_during_scheduling_ignored_during_execution** | [**[PodAffinityTerm]**](PodAffinityTerm.md) | If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**preferred_during_scheduling_ignored_during_execution** | [**List[WeightedPodAffinityTerm]**](WeightedPodAffinityTerm.md) | The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. | [optional] +**required_during_scheduling_ignored_during_execution** | [**List[PodAffinityTerm]**](PodAffinityTerm.md) | If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. 
| [optional] + +## Example + +```python +from argo_workflows.models.pod_affinity import PodAffinity + +# TODO update the JSON string below +json = "{}" +# create an instance of PodAffinity from a JSON string +pod_affinity_instance = PodAffinity.from_json(json) +# print the JSON string representation of the object +print(PodAffinity.to_json()) +# convert the object into a dict +pod_affinity_dict = pod_affinity_instance.to_dict() +# create an instance of PodAffinity from a dict +pod_affinity_form_dict = pod_affinity.from_dict(pod_affinity_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PodAffinityTerm.md b/sdks/python/client/docs/PodAffinityTerm.md index e4aa102f641b..0eb889022985 100644 --- a/sdks/python/client/docs/PodAffinityTerm.md +++ b/sdks/python/client/docs/PodAffinityTerm.md @@ -3,14 +3,31 @@ Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**topology_key** | **str** | This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. 
| **label_selector** | [**LabelSelector**](LabelSelector.md) | | [optional] **namespace_selector** | [**LabelSelector**](LabelSelector.md) | | [optional] -**namespaces** | **[str]** | namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\" | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**namespaces** | **List[str]** | namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\" | [optional] +**topology_key** | **str** | This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. 
| + +## Example + +```python +from argo_workflows.models.pod_affinity_term import PodAffinityTerm + +# TODO update the JSON string below +json = "{}" +# create an instance of PodAffinityTerm from a JSON string +pod_affinity_term_instance = PodAffinityTerm.from_json(json) +# print the JSON string representation of the object +print(PodAffinityTerm.to_json()) +# convert the object into a dict +pod_affinity_term_dict = pod_affinity_term_instance.to_dict() +# create an instance of PodAffinityTerm from a dict +pod_affinity_term_form_dict = pod_affinity_term.from_dict(pod_affinity_term_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PodAntiAffinity.md b/sdks/python/client/docs/PodAntiAffinity.md index f2b0ab3076b6..5f3a7283a45e 100644 --- a/sdks/python/client/docs/PodAntiAffinity.md +++ b/sdks/python/client/docs/PodAntiAffinity.md @@ -3,12 +3,29 @@ Pod anti affinity is a group of inter pod anti affinity scheduling rules. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**preferred_during_scheduling_ignored_during_execution** | [**[WeightedPodAffinityTerm]**](WeightedPodAffinityTerm.md) | The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. 
| [optional] -**required_during_scheduling_ignored_during_execution** | [**[PodAffinityTerm]**](PodAffinityTerm.md) | If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**preferred_during_scheduling_ignored_during_execution** | [**List[WeightedPodAffinityTerm]**](WeightedPodAffinityTerm.md) | The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding \"weight\" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. | [optional] +**required_during_scheduling_ignored_during_execution** | [**List[PodAffinityTerm]**](PodAffinityTerm.md) | If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. 
due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. | [optional] + +## Example + +```python +from argo_workflows.models.pod_anti_affinity import PodAntiAffinity + +# TODO update the JSON string below +json = "{}" +# create an instance of PodAntiAffinity from a JSON string +pod_anti_affinity_instance = PodAntiAffinity.from_json(json) +# print the JSON string representation of the object +print(PodAntiAffinity.to_json()) +# convert the object into a dict +pod_anti_affinity_dict = pod_anti_affinity_instance.to_dict() +# create an instance of PodAntiAffinity from a dict +pod_anti_affinity_form_dict = pod_anti_affinity.from_dict(pod_anti_affinity_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PodDNSConfig.md b/sdks/python/client/docs/PodDNSConfig.md index 58fb6222a8dc..ccc8a8e5ec6b 100644 --- a/sdks/python/client/docs/PodDNSConfig.md +++ b/sdks/python/client/docs/PodDNSConfig.md @@ -3,13 +3,30 @@ PodDNSConfig defines the DNS parameters of a pod in addition to those generated from DNSPolicy. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**nameservers** | **[str]** | A list of DNS name server IP addresses. This will be appended to the base nameservers generated from DNSPolicy. Duplicated nameservers will be removed. | [optional] -**options** | [**[PodDNSConfigOption]**](PodDNSConfigOption.md) | A list of DNS resolver options. This will be merged with the base options generated from DNSPolicy. Duplicated entries will be removed. Resolution options given in Options will override those that appear in the base DNSPolicy. 
| [optional] -**searches** | **[str]** | A list of DNS search domains for host-name lookup. This will be appended to the base search paths generated from DNSPolicy. Duplicated search paths will be removed. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**nameservers** | **List[str]** | A list of DNS name server IP addresses. This will be appended to the base nameservers generated from DNSPolicy. Duplicated nameservers will be removed. | [optional] +**options** | [**List[PodDNSConfigOption]**](PodDNSConfigOption.md) | A list of DNS resolver options. This will be merged with the base options generated from DNSPolicy. Duplicated entries will be removed. Resolution options given in Options will override those that appear in the base DNSPolicy. | [optional] +**searches** | **List[str]** | A list of DNS search domains for host-name lookup. This will be appended to the base search paths generated from DNSPolicy. Duplicated search paths will be removed. 
| [optional] + +## Example + +```python +from argo_workflows.models.pod_dns_config import PodDNSConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of PodDNSConfig from a JSON string +pod_dns_config_instance = PodDNSConfig.from_json(json) +# print the JSON string representation of the object +print(PodDNSConfig.to_json()) +# convert the object into a dict +pod_dns_config_dict = pod_dns_config_instance.to_dict() +# create an instance of PodDNSConfig from a dict +pod_dns_config_form_dict = pod_dns_config.from_dict(pod_dns_config_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PodDNSConfigOption.md b/sdks/python/client/docs/PodDNSConfigOption.md index 987dd82b2051..285ec649b5a6 100644 --- a/sdks/python/client/docs/PodDNSConfigOption.md +++ b/sdks/python/client/docs/PodDNSConfigOption.md @@ -3,12 +3,29 @@ PodDNSConfigOption defines DNS resolver options of a pod. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Required. 
| [optional] **value** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.pod_dns_config_option import PodDNSConfigOption + +# TODO update the JSON string below +json = "{}" +# create an instance of PodDNSConfigOption from a JSON string +pod_dns_config_option_instance = PodDNSConfigOption.from_json(json) +# print the JSON string representation of the object +print(PodDNSConfigOption.to_json()) + +# convert the object into a dict +pod_dns_config_option_dict = pod_dns_config_option_instance.to_dict() +# create an instance of PodDNSConfigOption from a dict +pod_dns_config_option_form_dict = pod_dns_config_option.from_dict(pod_dns_config_option_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PodSecurityContext.md b/sdks/python/client/docs/PodSecurityContext.md index e146150529a2..6144179947ad 100644 --- a/sdks/python/client/docs/PodSecurityContext.md +++ b/sdks/python/client/docs/PodSecurityContext.md @@ -3,6 +3,7 @@ PodSecurityContext holds pod-level security attributes and common container settings. Some fields are also present in container.securityContext. Field values of container.securityContext take precedence over field values of PodSecurityContext. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **fs_group** | **int** | A special supplemental group that applies to all containers in a pod. Some volume types allow the Kubelet to change the ownership of that volume to be owned by the pod: 1. The owning GID will be the FSGroup 2. The setgid bit is set (new files created in the volume will be owned by FSGroup) 3. 
The permission bits are OR'd with rw-rw---- If unset, the Kubelet will not modify the ownership and permissions of any volume. Note that this field cannot be set when spec.os.name is windows. | [optional] @@ -12,11 +13,27 @@ Name | Type | Description | Notes **run_as_user** | **int** | The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows. | [optional] **se_linux_options** | [**SELinuxOptions**](SELinuxOptions.md) | | [optional] **seccomp_profile** | [**SeccompProfile**](SeccompProfile.md) | | [optional] -**supplemental_groups** | **[int]** | A list of groups applied to the first process run in each container, in addition to the container's primary GID. If unspecified, no groups will be added to any container. Note that this field cannot be set when spec.os.name is windows. | [optional] -**sysctls** | [**[Sysctl]**](Sysctl.md) | Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows. | [optional] +**supplemental_groups** | **List[int]** | A list of groups applied to the first process run in each container, in addition to the container's primary GID. If unspecified, no groups will be added to any container. Note that this field cannot be set when spec.os.name is windows. | [optional] +**sysctls** | [**List[Sysctl]**](Sysctl.md) | Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows. 
| [optional] **windows_options** | [**WindowsSecurityContextOptions**](WindowsSecurityContextOptions.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.pod_security_context import PodSecurityContext + +# TODO update the JSON string below +json = "{}" +# create an instance of PodSecurityContext from a JSON string +pod_security_context_instance = PodSecurityContext.from_json(json) +# print the JSON string representation of the object +print(PodSecurityContext.to_json()) + +# convert the object into a dict +pod_security_context_dict = pod_security_context_instance.to_dict() +# create an instance of PodSecurityContext from a dict +pod_security_context_form_dict = pod_security_context.from_dict(pod_security_context_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PortworxVolumeSource.md b/sdks/python/client/docs/PortworxVolumeSource.md index c5beb6abac1f..ae61a3fed525 100644 --- a/sdks/python/client/docs/PortworxVolumeSource.md +++ b/sdks/python/client/docs/PortworxVolumeSource.md @@ -3,13 +3,30 @@ PortworxVolumeSource represents a Portworx volume resource. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**volume_id** | **str** | VolumeID uniquely identifies a Portworx volume | **fs_type** | **str** | FSType represents the filesystem type to mount Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\". Implicitly inferred to be \"ext4\" if unspecified. | [optional] **read_only** | **bool** | Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**volume_id** | **str** | VolumeID uniquely identifies a Portworx volume | + +## Example + +```python +from argo_workflows.models.portworx_volume_source import PortworxVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of PortworxVolumeSource from a JSON string +portworx_volume_source_instance = PortworxVolumeSource.from_json(json) +# print the JSON string representation of the object +print(PortworxVolumeSource.to_json()) +# convert the object into a dict +portworx_volume_source_dict = portworx_volume_source_instance.to_dict() +# create an instance of PortworxVolumeSource from a dict +portworx_volume_source_form_dict = portworx_volume_source.from_dict(portworx_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PreferredSchedulingTerm.md b/sdks/python/client/docs/PreferredSchedulingTerm.md index b99dff6c665a..cdff7cbc6887 100644 --- a/sdks/python/client/docs/PreferredSchedulingTerm.md +++ b/sdks/python/client/docs/PreferredSchedulingTerm.md @@ -3,12 +3,29 @@ An empty preferred scheduling term matches all objects with implicit weight 0 (i.e. it's a no-op). A null preferred scheduling term matches no objects (i.e. is also a no-op). ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **preference** | [**NodeSelectorTerm**](NodeSelectorTerm.md) | | **weight** | **int** | Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100. 
| -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.preferred_scheduling_term import PreferredSchedulingTerm + +# TODO update the JSON string below +json = "{}" +# create an instance of PreferredSchedulingTerm from a JSON string +preferred_scheduling_term_instance = PreferredSchedulingTerm.from_json(json) +# print the JSON string representation of the object +print(PreferredSchedulingTerm.to_json()) + +# convert the object into a dict +preferred_scheduling_term_dict = preferred_scheduling_term_instance.to_dict() +# create an instance of PreferredSchedulingTerm from a dict +preferred_scheduling_term_form_dict = preferred_scheduling_term.from_dict(preferred_scheduling_term_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Probe.md b/sdks/python/client/docs/Probe.md index 91f895c32d84..e38958733b33 100644 --- a/sdks/python/client/docs/Probe.md +++ b/sdks/python/client/docs/Probe.md @@ -3,9 +3,10 @@ Probe describes a health check to be performed against a container to determine whether it is alive or ready to receive traffic. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**_exec** | [**ExecAction**](ExecAction.md) | | [optional] +**var_exec** | [**ExecAction**](ExecAction.md) | | [optional] **failure_threshold** | **int** | Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1. 
| [optional] **grpc** | [**GRPCAction**](GRPCAction.md) | | [optional] **http_get** | [**HTTPGetAction**](HTTPGetAction.md) | | [optional] @@ -15,8 +16,24 @@ Name | Type | Description | Notes **tcp_socket** | [**TCPSocketAction**](TCPSocketAction.md) | | [optional] **termination_grace_period_seconds** | **int** | Optional duration in seconds the pod needs to terminate gracefully upon probe failure. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. If this value is nil, the pod's terminationGracePeriodSeconds will be used. Otherwise, this value overrides the value provided by the pod spec. Value must be non-negative integer. The value zero indicates stop immediately via the kill signal (no opportunity to shut down). This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset. | [optional] **timeout_seconds** | **int** | Number of seconds after which the probe times out. Defaults to 1 second. Minimum value is 1. 
More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.probe import Probe + +# TODO update the JSON string below +json = "{}" +# create an instance of Probe from a JSON string +probe_instance = Probe.from_json(json) +# print the JSON string representation of the object +print(Probe.to_json()) + +# convert the object into a dict +probe_dict = probe_instance.to_dict() +# create an instance of Probe from a dict +probe_form_dict = probe.from_dict(probe_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ProjectedVolumeSource.md b/sdks/python/client/docs/ProjectedVolumeSource.md index ef201cb06090..356ba762a3eb 100644 --- a/sdks/python/client/docs/ProjectedVolumeSource.md +++ b/sdks/python/client/docs/ProjectedVolumeSource.md @@ -3,12 +3,29 @@ Represents a projected volume source ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **default_mode** | **int** | Mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. 
| [optional] -**sources** | [**[VolumeProjection]**](VolumeProjection.md) | list of volume projections | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**sources** | [**List[VolumeProjection]**](VolumeProjection.md) | list of volume projections | [optional] + +## Example + +```python +from argo_workflows.models.projected_volume_source import ProjectedVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of ProjectedVolumeSource from a JSON string +projected_volume_source_instance = ProjectedVolumeSource.from_json(json) +# print the JSON string representation of the object +print(ProjectedVolumeSource.to_json()) +# convert the object into a dict +projected_volume_source_dict = projected_volume_source_instance.to_dict() +# create an instance of ProjectedVolumeSource from a dict +projected_volume_source_form_dict = projected_volume_source.from_dict(projected_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/QuobyteVolumeSource.md b/sdks/python/client/docs/QuobyteVolumeSource.md index f5d3ddc6d667..8496d2274d27 100644 --- a/sdks/python/client/docs/QuobyteVolumeSource.md +++ b/sdks/python/client/docs/QuobyteVolumeSource.md @@ -3,16 +3,33 @@ Represents a Quobyte mount that lasts the lifetime of a pod. Quobyte volumes do not support ownership management or SELinux relabeling. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**registry** | **str** | Registry represents a single or multiple Quobyte Registry services specified as a string as host:port pair (multiple entries are separated with commas) which acts as the central registry for volumes | -**volume** | **str** | Volume is a string that references an already created Quobyte volume by name. | **group** | **str** | Group to map volume access to Default is no group | [optional] **read_only** | **bool** | ReadOnly here will force the Quobyte volume to be mounted with read-only permissions. Defaults to false. | [optional] +**registry** | **str** | Registry represents a single or multiple Quobyte Registry services specified as a string as host:port pair (multiple entries are separated with commas) which acts as the central registry for volumes | **tenant** | **str** | Tenant owning the given Quobyte volume in the Backend Used with dynamically provisioned Quobyte volumes, value is set by the plugin | [optional] **user** | **str** | User to map volume access to Defaults to serivceaccount user | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**volume** | **str** | Volume is a string that references an already created Quobyte volume by name. 
| + +## Example + +```python +from argo_workflows.models.quobyte_volume_source import QuobyteVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of QuobyteVolumeSource from a JSON string +quobyte_volume_source_instance = QuobyteVolumeSource.from_json(json) +# print the JSON string representation of the object +print(QuobyteVolumeSource.to_json()) +# convert the object into a dict +quobyte_volume_source_dict = quobyte_volume_source_instance.to_dict() +# create an instance of QuobyteVolumeSource from a dict +quobyte_volume_source_form_dict = quobyte_volume_source.from_dict(quobyte_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/RBDVolumeSource.md b/sdks/python/client/docs/RBDVolumeSource.md index 64324677561b..67d6dc225a23 100644 --- a/sdks/python/client/docs/RBDVolumeSource.md +++ b/sdks/python/client/docs/RBDVolumeSource.md @@ -3,18 +3,35 @@ Represents a Rados Block Device mount that lasts the lifetime of a pod. RBD volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**image** | **str** | The rados image name. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | -**monitors** | **[str]** | A collection of Ceph monitors. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | **fs_type** | **str** | Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd | [optional] +**image** | **str** | The rados image name. 
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | **keyring** | **str** | Keyring is the path to key ring for RBDUser. Default is /etc/ceph/keyring. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | [optional] +**monitors** | **List[str]** | A collection of Ceph monitors. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | **pool** | **str** | The rados pool name. Default is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | [optional] **read_only** | **bool** | ReadOnly here will force the ReadOnly setting in VolumeMounts. Defaults to false. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | [optional] **secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | [optional] **user** | **str** | The rados user name. Default is admin. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.rbd_volume_source import RBDVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of RBDVolumeSource from a JSON string +rbd_volume_source_instance = RBDVolumeSource.from_json(json) +# print the JSON string representation of the object +print(RBDVolumeSource.to_json()) + +# convert the object into a dict +rbd_volume_source_dict = rbd_volume_source_instance.to_dict() +# create an instance of RBDVolumeSource from a dict +rbd_volume_source_form_dict = rbd_volume_source.from_dict(rbd_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ResourceFieldSelector.md b/sdks/python/client/docs/ResourceFieldSelector.md 
index 306a7e20ead3..2755157cb4d6 100644 --- a/sdks/python/client/docs/ResourceFieldSelector.md +++ b/sdks/python/client/docs/ResourceFieldSelector.md @@ -3,13 +3,30 @@ ResourceFieldSelector represents container resources (cpu, memory) and their output format ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**resource** | **str** | Required: resource to select | **container_name** | **str** | Container name: required for volumes, optional for env vars | [optional] **divisor** | **str** | Quantity is a fixed-point representation of a number. It provides convenient marshaling/unmarshaling in JSON and YAML, in addition to String() and AsInt64() accessors. The serialization format is: <quantity> ::= <signedNumber><suffix> (Note that <suffix> may be empty, from the \"\" case in <decimalSI>.) <digit> ::= 0 | 1 | ... | 9 <digits> ::= <digit> | <digit><digits> <number> ::= <digits> | <digits>.<digits> | <digits>. | .<digits> <sign> ::= \"+\" | \"-\" <signedNumber> ::= <number> | <sign><number> <suffix> ::= <binarySI> | <decimalExponent> | <decimalSI> <binarySI> ::= Ki | Mi | Gi | Ti | Pi | Ei (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html) <decimalSI> ::= m | \"\" | k | M | G | T | P | E (Note that 1024 = 1Ki but 1000 = 1k; I didn't choose the capitalization.) <decimalExponent> ::= \"e\" <signedNumber> | \"E\" <signedNumber> No matter which of the three exponent forms is used, no quantity may represent a number greater than 2^63-1 in magnitude, nor may it have more than 3 decimal places. Numbers larger or more precise will be capped or rounded up. (E.g.: 0.1m will rounded up to 1m.) This may be extended in the future if we require larger or smaller quantities. When a Quantity is parsed from a string, it will remember the type of suffix it had, and will use the same type again when it is serialized. Before serializing, Quantity will be put in \"canonical form\". 
This means that Exponent/suffix will be adjusted up or down (with a corresponding increase or decrease in Mantissa) such that: a. No precision is lost b. No fractional digits will be emitted c. The exponent (or suffix) is as large as possible. The sign will be omitted unless the number is negative. Examples: 1.5 will be serialized as \"1500m\" 1.5Gi will be serialized as \"1536Mi\" Note that the quantity will NEVER be internally represented by a floating point number. That is the whole point of this exercise. Non-canonical values will still parse as long as they are well formed, but will be re-emitted in their canonical form. (So always use canonical form, or don't diff.) This format is intended to make it difficult to use these numbers without writing some sort of special handling code in the hopes that that will cause implementors to also use a fixed point implementation. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**resource** | **str** | Required: resource to select | + +## Example + +```python +from argo_workflows.models.resource_field_selector import ResourceFieldSelector + +# TODO update the JSON string below +json = "{}" +# create an instance of ResourceFieldSelector from a JSON string +resource_field_selector_instance = ResourceFieldSelector.from_json(json) +# print the JSON string representation of the object +print(ResourceFieldSelector.to_json()) +# convert the object into a dict +resource_field_selector_dict = resource_field_selector_instance.to_dict() +# create an instance of ResourceFieldSelector from a dict +resource_field_selector_form_dict = resource_field_selector.from_dict(resource_field_selector_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/ResourceRequirements.md b/sdks/python/client/docs/ResourceRequirements.md index 6fcd52166776..94d43f8584c9 100644 --- a/sdks/python/client/docs/ResourceRequirements.md +++ b/sdks/python/client/docs/ResourceRequirements.md @@ -3,12 +3,29 @@ ResourceRequirements describes the compute resource requirements. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**limits** | **{str: (str,)}** | Limits describes the maximum amount of compute resources allowed. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ | [optional] -**requests** | **{str: (str,)}** | Requests describes the minimum amount of compute resources required. If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, otherwise to an implementation-defined value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**limits** | **Dict[str, str]** | Limits describes the maximum amount of compute resources allowed. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ | [optional] +**requests** | **Dict[str, str]** | Requests describes the minimum amount of compute resources required. If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, otherwise to an implementation-defined value. 
More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ | [optional] + +## Example + +```python +from argo_workflows.models.resource_requirements import ResourceRequirements + +# TODO update the JSON string below +json = "{}" +# create an instance of ResourceRequirements from a JSON string +resource_requirements_instance = ResourceRequirements.from_json(json) +# print the JSON string representation of the object +print(ResourceRequirements.to_json()) +# convert the object into a dict +resource_requirements_dict = resource_requirements_instance.to_dict() +# create an instance of ResourceRequirements from a dict +resource_requirements_form_dict = resource_requirements.from_dict(resource_requirements_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SELinuxOptions.md b/sdks/python/client/docs/SELinuxOptions.md index afa2e5b06cda..a3f3355a4ae0 100644 --- a/sdks/python/client/docs/SELinuxOptions.md +++ b/sdks/python/client/docs/SELinuxOptions.md @@ -3,14 +3,31 @@ SELinuxOptions are the labels to be applied to the container ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **level** | **str** | Level is SELinux level label that applies to the container. | [optional] **role** | **str** | Role is a SELinux role label that applies to the container. | [optional] **type** | **str** | Type is a SELinux type label that applies to the container. | [optional] **user** | **str** | User is a SELinux user label that applies to the container. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.se_linux_options import SELinuxOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of SELinuxOptions from a JSON string +se_linux_options_instance = SELinuxOptions.from_json(json) +# print the JSON string representation of the object +print(SELinuxOptions.to_json()) + +# convert the object into a dict +se_linux_options_dict = se_linux_options_instance.to_dict() +# create an instance of SELinuxOptions from a dict +se_linux_options_form_dict = se_linux_options.from_dict(se_linux_options_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ScaleIOVolumeSource.md b/sdks/python/client/docs/ScaleIOVolumeSource.md index cacc9eee7bcd..df7b4d01b775 100644 --- a/sdks/python/client/docs/ScaleIOVolumeSource.md +++ b/sdks/python/client/docs/ScaleIOVolumeSource.md @@ -3,20 +3,37 @@ ScaleIOVolumeSource represents a persistent ScaleIO volume ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**gateway** | **str** | The host address of the ScaleIO API Gateway. | -**secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | -**system** | **str** | The name of the storage system as configured in ScaleIO. | **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Default is \"xfs\". | [optional] +**gateway** | **str** | The host address of the ScaleIO API Gateway. | **protection_domain** | **str** | The name of the ScaleIO Protection Domain for the configured storage. 
| [optional] **read_only** | **bool** | Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. | [optional] +**secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | **ssl_enabled** | **bool** | Flag to enable/disable SSL communication with Gateway, default false | [optional] **storage_mode** | **str** | Indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. Default is ThinProvisioned. | [optional] **storage_pool** | **str** | The ScaleIO Storage Pool associated with the protection domain. | [optional] +**system** | **str** | The name of the storage system as configured in ScaleIO. | **volume_name** | **str** | The name of a volume already created in the ScaleIO system that is associated with this volume source. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.scale_io_volume_source import ScaleIOVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of ScaleIOVolumeSource from a JSON string +scale_io_volume_source_instance = ScaleIOVolumeSource.from_json(json) +# print the JSON string representation of the object +print(ScaleIOVolumeSource.to_json()) + +# convert the object into a dict +scale_io_volume_source_dict = scale_io_volume_source_instance.to_dict() +# create an instance of ScaleIOVolumeSource from a dict +scale_io_volume_source_form_dict = scale_io_volume_source.from_dict(scale_io_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SeccompProfile.md b/sdks/python/client/docs/SeccompProfile.md index 3855ebb15e6e..14e65d453a7c 100644 --- a/sdks/python/client/docs/SeccompProfile.md +++ 
b/sdks/python/client/docs/SeccompProfile.md @@ -3,12 +3,29 @@ SeccompProfile defines a pod/container's seccomp profile settings. Only one profile source may be set. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**type** | **str** | type indicates which kind of seccomp profile will be applied. Valid options are: Localhost - a profile defined in a file on the node should be used. RuntimeDefault - the container runtime default profile should be used. Unconfined - no profile should be applied. Possible enum values: - `\"Localhost\"` indicates a profile defined in a file on the node should be used. The file's location relative to <kubelet-root-dir>/seccomp. - `\"RuntimeDefault\"` represents the default container runtime seccomp profile. - `\"Unconfined\"` indicates no seccomp profile is applied (A.K.A. unconfined). | **localhost_profile** | **str** | localhostProfile indicates a profile defined in a file on the node should be used. The profile must be preconfigured on the node to work. Must be a descending path, relative to the kubelet's configured seccomp profile location. Must only be set if type is \"Localhost\". | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**type** | **str** | type indicates which kind of seccomp profile will be applied. Valid options are: Localhost - a profile defined in a file on the node should be used. RuntimeDefault - the container runtime default profile should be used. Unconfined - no profile should be applied. Possible enum values: - `\"Localhost\"` indicates a profile defined in a file on the node should be used. The file's location relative to <kubelet-root-dir>/seccomp. - `\"RuntimeDefault\"` represents the default container runtime seccomp profile. - `\"Unconfined\"` indicates no seccomp profile is applied (A.K.A. unconfined). 
| + +## Example + +```python +from argo_workflows.models.seccomp_profile import SeccompProfile + +# TODO update the JSON string below +json = "{}" +# create an instance of SeccompProfile from a JSON string +seccomp_profile_instance = SeccompProfile.from_json(json) +# print the JSON string representation of the object +print(SeccompProfile.to_json()) +# convert the object into a dict +seccomp_profile_dict = seccomp_profile_instance.to_dict() +# create an instance of SeccompProfile from a dict +seccomp_profile_form_dict = seccomp_profile.from_dict(seccomp_profile_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SecretEnvSource.md b/sdks/python/client/docs/SecretEnvSource.md index 8a463ccfa374..7f400c53b5cf 100644 --- a/sdks/python/client/docs/SecretEnvSource.md +++ b/sdks/python/client/docs/SecretEnvSource.md @@ -3,12 +3,29 @@ SecretEnvSource selects a Secret to populate the environment variables with. The contents of the target Secret's Data field will represent the key-value pairs as environment variables. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] **optional** | **bool** | Specify whether the Secret must be defined | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.secret_env_source import SecretEnvSource + +# TODO update the JSON string below +json = "{}" +# create an instance of SecretEnvSource from a JSON string +secret_env_source_instance = SecretEnvSource.from_json(json) +# print the JSON string representation of the object +print(SecretEnvSource.to_json()) + +# convert the object into a dict +secret_env_source_dict = secret_env_source_instance.to_dict() +# create an instance of SecretEnvSource from a dict +secret_env_source_form_dict = secret_env_source.from_dict(secret_env_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SecretKeySelector.md b/sdks/python/client/docs/SecretKeySelector.md index 6a754141450d..3bd5b276ccfb 100644 --- a/sdks/python/client/docs/SecretKeySelector.md +++ b/sdks/python/client/docs/SecretKeySelector.md @@ -3,13 +3,30 @@ SecretKeySelector selects a key of a Secret. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **key** | **str** | The key of the secret to select from. Must be a valid secret key. | **name** | **str** | Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] **optional** | **bool** | Specify whether the Secret or its key must be defined | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.secret_key_selector import SecretKeySelector + +# TODO update the JSON string below +json = "{}" +# create an instance of SecretKeySelector from a JSON string +secret_key_selector_instance = SecretKeySelector.from_json(json) +# print the JSON string representation of the object +print(SecretKeySelector.to_json()) + +# convert the object into a dict +secret_key_selector_dict = secret_key_selector_instance.to_dict() +# create an instance of SecretKeySelector from a dict +secret_key_selector_form_dict = secret_key_selector.from_dict(secret_key_selector_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SecretProjection.md b/sdks/python/client/docs/SecretProjection.md index f9a2d1bc3e89..407c03a165d1 100644 --- a/sdks/python/client/docs/SecretProjection.md +++ b/sdks/python/client/docs/SecretProjection.md @@ -3,13 +3,30 @@ Adapts a secret into a projected volume. The contents of the target Secret's Data field will be presented in a projected volume as files using the keys in the Data field as the file names. Note that this is identical to a secret volume source without the default mode. 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] +**items** | [**List[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] **name** | **str** | Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | [optional] **optional** | **bool** | Specify whether the Secret or its key must be defined | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.secret_projection import SecretProjection + +# TODO update the JSON string below +json = "{}" +# create an instance of SecretProjection from a JSON string +secret_projection_instance = SecretProjection.from_json(json) +# print the JSON string representation of the object +print(SecretProjection.to_json()) + +# convert the object into a dict +secret_projection_dict = secret_projection_instance.to_dict() +# create an instance of SecretProjection from a dict +secret_projection_form_dict = secret_projection.from_dict(secret_projection_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SecretVolumeSource.md b/sdks/python/client/docs/SecretVolumeSource.md index ce9194bba8b0..30f8ded455aa 100644 --- a/sdks/python/client/docs/SecretVolumeSource.md +++ b/sdks/python/client/docs/SecretVolumeSource.md @@ -3,14 +3,31 @@ Adapts a Secret into a volume. The contents of the target Secret's Data field will be presented in a volume as files using the keys in the Data field as the file names. Secret volumes support ownership management and SELinux relabeling. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **default_mode** | **int** | Optional: mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. 
YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. | [optional] -**items** | [**[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] +**items** | [**List[KeyToPath]**](KeyToPath.md) | If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'. | [optional] **optional** | **bool** | Specify whether the Secret or its keys must be defined | [optional] **secret_name** | **str** | Name of the secret in the pod's namespace to use. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#secret | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.secret_volume_source import SecretVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of SecretVolumeSource from a JSON string +secret_volume_source_instance = SecretVolumeSource.from_json(json) +# print the JSON string representation of the object +print(SecretVolumeSource.to_json()) + +# convert the object into a dict +secret_volume_source_dict = secret_volume_source_instance.to_dict() +# create an instance of SecretVolumeSource from a dict +secret_volume_source_form_dict = secret_volume_source.from_dict(secret_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SecurityContext.md b/sdks/python/client/docs/SecurityContext.md index 7d1963f9e98e..866a2cf5ab14 100644 --- a/sdks/python/client/docs/SecurityContext.md +++ b/sdks/python/client/docs/SecurityContext.md @@ -3,6 +3,7 @@ SecurityContext holds security configuration that will be applied to a container. Some fields are present in both SecurityContext and PodSecurityContext. When both are set, the values in SecurityContext take precedence. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **allow_privilege_escalation** | **bool** | AllowPrivilegeEscalation controls whether a process can gain more privileges than its parent process. This bool directly controls if the no_new_privs flag will be set on the container process. 
AllowPrivilegeEscalation is true always when the container is: 1) run as Privileged 2) has CAP_SYS_ADMIN Note that this field cannot be set when spec.os.name is windows. | [optional] @@ -16,8 +17,24 @@ Name | Type | Description | Notes **se_linux_options** | [**SELinuxOptions**](SELinuxOptions.md) | | [optional] **seccomp_profile** | [**SeccompProfile**](SeccompProfile.md) | | [optional] **windows_options** | [**WindowsSecurityContextOptions**](WindowsSecurityContextOptions.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.security_context import SecurityContext + +# TODO update the JSON string below +json = "{}" +# create an instance of SecurityContext from a JSON string +security_context_instance = SecurityContext.from_json(json) +# print the JSON string representation of the object +print(SecurityContext.to_json()) + +# convert the object into a dict +security_context_dict = security_context_instance.to_dict() +# create an instance of SecurityContext from a dict +security_context_form_dict = security_context.from_dict(security_context_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SensorCreateSensorRequest.md b/sdks/python/client/docs/SensorCreateSensorRequest.md index 972dcf8a3a83..14f649ff99db 100644 --- a/sdks/python/client/docs/SensorCreateSensorRequest.md +++ b/sdks/python/client/docs/SensorCreateSensorRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] **namespace** | **str** | | [optional] **sensor** | 
[**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.sensor_create_sensor_request import SensorCreateSensorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of SensorCreateSensorRequest from a JSON string +sensor_create_sensor_request_instance = SensorCreateSensorRequest.from_json(json) +# print the JSON string representation of the object +print(SensorCreateSensorRequest.to_json()) + +# convert the object into a dict +sensor_create_sensor_request_dict = sensor_create_sensor_request_instance.to_dict() +# create an instance of SensorCreateSensorRequest from a dict +sensor_create_sensor_request_form_dict = sensor_create_sensor_request.from_dict(sensor_create_sensor_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SensorLogEntry.md b/sdks/python/client/docs/SensorLogEntry.md index 8faad1952da6..3169e2586d17 100644 --- a/sdks/python/client/docs/SensorLogEntry.md +++ b/sdks/python/client/docs/SensorLogEntry.md @@ -2,6 +2,7 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **dependency_name** | **str** | | [optional] @@ -12,8 +13,24 @@ Name | Type | Description | Notes **sensor_name** | **str** | | [optional] **time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. 
| [optional] **trigger_name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.sensor_log_entry import SensorLogEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of SensorLogEntry from a JSON string +sensor_log_entry_instance = SensorLogEntry.from_json(json) +# print the JSON string representation of the object +print(SensorLogEntry.to_json()) + +# convert the object into a dict +sensor_log_entry_dict = sensor_log_entry_instance.to_dict() +# create an instance of SensorLogEntry from a dict +sensor_log_entry_form_dict = sensor_log_entry.from_dict(sensor_log_entry_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SensorSensorWatchEvent.md b/sdks/python/client/docs/SensorSensorWatchEvent.md index 5b7bd03a4de0..8d8b157f0511 100644 --- a/sdks/python/client/docs/SensorSensorWatchEvent.md +++ b/sdks/python/client/docs/SensorSensorWatchEvent.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **object** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] **type** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.sensor_sensor_watch_event import SensorSensorWatchEvent + +# TODO update the JSON string below +json = "{}" +# create an instance of SensorSensorWatchEvent from a JSON string +sensor_sensor_watch_event_instance = SensorSensorWatchEvent.from_json(json) +# print the JSON 
string representation of the object +print(SensorSensorWatchEvent.to_json()) + +# convert the object into a dict +sensor_sensor_watch_event_dict = sensor_sensor_watch_event_instance.to_dict() +# create an instance of SensorSensorWatchEvent from a dict +sensor_sensor_watch_event_form_dict = sensor_sensor_watch_event.from_dict(sensor_sensor_watch_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SensorServiceApi.md b/sdks/python/client/docs/SensorServiceApi.md index e24fbcbda071..283af76bdc61 100644 --- a/sdks/python/client/docs/SensorServiceApi.md +++ b/sdks/python/client/docs/SensorServiceApi.md @@ -23,13 +23,12 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import sensor_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor -from argo_workflows.model.sensor_create_sensor_request import SensorCreateSensorRequest +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.models.sensor_create_sensor_request import SensorCreateSensorRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -42,7 +41,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -50,1959 +49,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = sensor_service_api.SensorServiceApi(api_client) - namespace = "namespace_example" # str | - body = SensorCreateSensorRequest( - create_options=CreateOptions( - dry_run=[ - "dry_run_example", - ], - field_manager="field_manager_example", - field_validation="field_validation_example", - ), - namespace="namespace_example", - sensor=IoArgoprojEventsV1alpha1Sensor( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojEventsV1alpha1SensorSpec( - dependencies=[ - IoArgoprojEventsV1alpha1EventDependency( - event_name="event_name_example", - event_source_name="event_source_name_example", - 
filters=IoArgoprojEventsV1alpha1EventDependencyFilter( - context=IoArgoprojEventsV1alpha1EventContext( - datacontenttype="datacontenttype_example", - id="id_example", - source="source_example", - specversion="specversion_example", - subject="subject_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - type="type_example", - ), - data=[ - IoArgoprojEventsV1alpha1DataFilter( - comparator="comparator_example", - path="path_example", - template="template_example", - type="type_example", - value=[ - "value_example", - ], - ), - ], - data_logical_operator="data_logical_operator_example", - expr_logical_operator="expr_logical_operator_example", - exprs=[ - IoArgoprojEventsV1alpha1ExprFilter( - expr="expr_example", - fields=[ - IoArgoprojEventsV1alpha1PayloadField( - name="name_example", - path="path_example", - ), - ], - ), - ], - script="script_example", - time=IoArgoprojEventsV1alpha1TimeFilter( - start="start_example", - stop="stop_example", - ), - ), - filters_logical_operator="filters_logical_operator_example", - name="name_example", - transform=IoArgoprojEventsV1alpha1EventDependencyTransformer( - jq="jq_example", - script="script_example", - ), - ), - ], - error_on_failed_round=True, - event_bus_name="event_bus_name_example", - logging_fields={ - "key": "key_example", - }, - replicas=1, - revision_history_limit=1, - template=IoArgoprojEventsV1alpha1Template( - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ 
- NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - 
"key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - 
post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - 
mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metadata=IoArgoprojEventsV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - node_selector={ - "key": "key_example", - }, - priority=1, - priority_class_name="priority_class_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - 
secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - 
fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - 
pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - 
), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - triggers=[ - IoArgoprojEventsV1alpha1Trigger( - at_least_once=True, - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - 
context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - policy=IoArgoprojEventsV1alpha1TriggerPolicy( - k8s=IoArgoprojEventsV1alpha1K8SResourcePolicy( - backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - error_on_backoff_timeout=True, - labels={ - "key": "key_example", - }, - ), - status=IoArgoprojEventsV1alpha1StatusPolicy( - allow=[ - 1, - ], - ), - ), - rate_limit=IoArgoprojEventsV1alpha1RateLimit( - requests_per_unit=1, - unit="unit_example", - ), - retry_strategy=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - template=IoArgoprojEventsV1alpha1TriggerTemplate( - argo_workflow=IoArgoprojEventsV1alpha1ArgoWorkflowTrigger( - args=[ - "args_example", - ], - operation="operation_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - source=IoArgoprojEventsV1alpha1ArtifactLocation( - configmap=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
file=IoArgoprojEventsV1alpha1FileArtifact( - path="path_example", - ), - git=IoArgoprojEventsV1alpha1GitArtifact( - branch="branch_example", - clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - file_path="file_path_example", - insecure_ignore_host_key=True, - ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( - name="name_example", - urls=[ - "urls_example", - ], - ), - ssh_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tag="tag_example", - url="url_example", - ), - inline="inline_example", - resource=IoArgoprojEventsV1alpha1Resource( - value='YQ==', - ), - s3=IoArgoprojEventsV1alpha1S3Artifact( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( - key="key_example", - name="name_example", - ), - ca_certificate=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1S3Filter( - prefix="prefix_example", - suffix="suffix_example", - ), - insecure=True, - metadata={ - "key": "key_example", - }, - region="region_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - url=IoArgoprojEventsV1alpha1URLArtifact( - path="path_example", - verify_cert=True, - ), - ), - ), - aws_lambda=IoArgoprojEventsV1alpha1AWSLambdaTrigger( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - function_name="function_name_example", - invocation_type="invocation_type_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - 
operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - region="region_example", - role_arn="role_arn_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure_event_hubs=IoArgoprojEventsV1alpha1AzureEventHubsTrigger( - fqdn="fqdn_example", - hub_name="hub_name_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - shared_access_key=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - shared_access_key_name=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure_service_bus=IoArgoprojEventsV1alpha1AzureServiceBusTrigger( - connection_string=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - queue_name="queue_name_example", - subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic_name="topic_name_example", - ), - conditions="conditions_example", - conditions_reset=[ - IoArgoprojEventsV1alpha1ConditionsResetCriteria( - by_time=IoArgoprojEventsV1alpha1ConditionsResetByTime( - cron="cron_example", - timezone="timezone_example", - ), - ), - ], - custom=IoArgoprojEventsV1alpha1CustomTrigger( - cert_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - secure=True, - server_name_override="server_name_override_example", - server_url="server_url_example", - spec={ - "key": "key_example", - }, - ), - email=IoArgoprojEventsV1alpha1EmailTrigger( - body="body_example", - _from="_from_example", - host="host_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - port=1, - smtp_password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - subject="subject_example", - to=[ - "to_example", - ], - username="username_example", - ), - http=IoArgoprojEventsV1alpha1HTTPTrigger( - basic_auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - headers={ - "key": "key_example", - }, - method="method_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - secure_headers=[ - IoArgoprojEventsV1alpha1SecureHeader( - name="name_example", - value_from=IoArgoprojEventsV1alpha1ValueFromSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - timeout="timeout_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", - ), - k8s=IoArgoprojEventsV1alpha1StandardK8STrigger( - live_object=True, - operation="operation_example", - parameters=[ - 
IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - patch_strategy="patch_strategy_example", - source=IoArgoprojEventsV1alpha1ArtifactLocation( - configmap=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - file=IoArgoprojEventsV1alpha1FileArtifact( - path="path_example", - ), - git=IoArgoprojEventsV1alpha1GitArtifact( - branch="branch_example", - clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - file_path="file_path_example", - insecure_ignore_host_key=True, - ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( - name="name_example", - urls=[ - "urls_example", - ], - ), - ssh_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tag="tag_example", - url="url_example", - ), - inline="inline_example", - resource=IoArgoprojEventsV1alpha1Resource( - value='YQ==', - ), - s3=IoArgoprojEventsV1alpha1S3Artifact( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( - key="key_example", - name="name_example", - ), - ca_certificate=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1S3Filter( - prefix="prefix_example", - suffix="suffix_example", - ), - insecure=True, - 
metadata={ - "key": "key_example", - }, - region="region_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - url=IoArgoprojEventsV1alpha1URLArtifact( - path="path_example", - verify_cert=True, - ), - ), - ), - kafka=IoArgoprojEventsV1alpha1KafkaTrigger( - compress=True, - flush_frequency=1, - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - partition=1, - partitioning_key="partitioning_key_example", - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - required_acks=1, - sasl=IoArgoprojEventsV1alpha1SASLConfig( - mechanism="mechanism_example", - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - user_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - schema_registry=IoArgoprojEventsV1alpha1SchemaRegistryConfig( - auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - schema_id=1, - url="url_example", - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - url="url_example", - version="version_example", - ), - log=IoArgoprojEventsV1alpha1LogTrigger( - interval_seconds="interval_seconds_example", - ), - name="name_example", - nats=IoArgoprojEventsV1alpha1NATSTrigger( - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", - ), - open_whisk=IoArgoprojEventsV1alpha1OpenWhiskTrigger( - action_name="action_name_example", - auth_token=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - host="host_example", - namespace="namespace_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - version="version_example", - ), - pulsar=IoArgoprojEventsV1alpha1PulsarTrigger( - auth_athenz_params={ - "key": "key_example", - }, - auth_athenz_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - auth_token_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - 
dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - tls_allow_insecure_connection=True, - tls_trust_certs_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls_validate_hostname=True, - topic="topic_example", - url="url_example", - ), - slack=IoArgoprojEventsV1alpha1SlackTrigger( - attachments="attachments_example", - blocks="blocks_example", - channel="channel_example", - message="message_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - sender=IoArgoprojEventsV1alpha1SlackSender( - icon="icon_example", - username="username_example", - ), - slack_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - thread=IoArgoprojEventsV1alpha1SlackThread( - 
broadcast_message_to_channel=True, - message_aggregation_key="message_aggregation_key_example", - ), - ), - ), - ), - ], - ), - status=IoArgoprojEventsV1alpha1SensorStatus( - status=IoArgoprojEventsV1alpha1Status( - conditions=[ - IoArgoprojEventsV1alpha1Condition( - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="type_example", - ), - ], - ), - ), - ), - ) # SensorCreateSensorRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.SensorServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.SensorCreateSensorRequest() # SensorCreateSensorRequest | + try: api_response = api_instance.create_sensor(namespace, body) + print("The response of SensorServiceApi->create_sensor:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling SensorServiceApi->create_sensor: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **body** | [**SensorCreateSensorRequest**](SensorCreateSensorRequest.md)| | + **namespace** | **str**| | + **body** | [**SensorCreateSensorRequest**](SensorCreateSensorRequest.md)| | ### Return type @@ -2017,7 +84,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2028,7 +94,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **delete_sensor** -> bool, date, datetime, dict, float, int, list, str, none_type delete_sensor(namespace, name) +> object delete_sensor(namespace, name, 
delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) @@ -2037,11 +103,10 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import sensor_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2054,7 +119,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2062,51 +127,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = sensor_service_api.SensorServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - delete_options_grace_period_seconds = "deleteOptions.gracePeriodSeconds_example" # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. 
If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) - delete_options_preconditions_uid = "deleteOptions.preconditions.uid_example" # str | Specifies the target UID. +optional. (optional) - delete_options_preconditions_resource_version = "deleteOptions.preconditions.resourceVersion_example" # str | Specifies the target ResourceVersion +optional. (optional) + api_instance = argo_workflows.SensorServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + delete_options_grace_period_seconds = 'delete_options_grace_period_seconds_example' # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) + delete_options_preconditions_uid = 'delete_options_preconditions_uid_example' # str | Specifies the target UID. +optional. (optional) + delete_options_preconditions_resource_version = 'delete_options_preconditions_resource_version_example' # str | Specifies the target ResourceVersion +optional. (optional) delete_options_orphan_dependents = True # bool | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. (optional) - delete_options_propagation_policy = "deleteOptions.propagationPolicy_example" # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) - delete_options_dry_run = [ - "deleteOptions.dryRun_example", - ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. (optional) + delete_options_propagation_policy = 'delete_options_propagation_policy_example' # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) + delete_options_dry_run = ['delete_options_dry_run_example'] # List[str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
(optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_sensor(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling SensorServiceApi->delete_sensor: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.delete_sensor(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + print("The response of SensorServiceApi->delete_sensor:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling SensorServiceApi->delete_sensor: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] - **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. 
| [optional] - **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] - **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] + **namespace** | **str**| | + **name** | **str**| | + **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] + **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] + **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. 
| [optional] + **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] + **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] + **delete_options_dry_run** | [**List[str]**](str.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
| [optional] ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -2117,7 +174,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2128,7 +184,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_sensor** -> IoArgoprojEventsV1alpha1Sensor get_sensor(namespace, name) +> IoArgoprojEventsV1alpha1Sensor get_sensor(namespace, name, get_options_resource_version=get_options_resource_version) @@ -2137,12 +193,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import sensor_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2155,7 +210,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2163,35 +218,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = sensor_service_api.SensorServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - get_options_resource_version = "getOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.get_sensor(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling SensorServiceApi->get_sensor: %s\n" % e) + api_instance = argo_workflows.SensorServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + get_options_resource_version = 'get_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.get_sensor(namespace, name, get_options_resource_version=get_options_resource_version) + print("The response of SensorServiceApi->get_sensor:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling SensorServiceApi->get_sensor: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **namespace** | **str**| | + **name** | **str**| | + **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional | [optional] ### Return type @@ -2206,7 +255,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2217,7 +265,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_sensors** -> IoArgoprojEventsV1alpha1SensorList list_sensors(namespace) +> IoArgoprojEventsV1alpha1SensorList list_sensors(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -2226,12 +274,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import sensor_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2244,7 +291,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. 
# Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2252,49 +299,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = sensor_service_api.SensorServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.SensorServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.list_sensors(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling SensorServiceApi->list_sensors: %s\n" % e) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. 
This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_sensors(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of SensorServiceApi->list_sensors:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling SensorServiceApi->list_sensors: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
| [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] ### Return type @@ -2309,7 +350,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2320,7 +360,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **sensors_logs** -> StreamResultOfSensorLogEntry sensors_logs(namespace) +> StreamResultOfSensorLogEntry sensors_logs(namespace, name=name, trigger_name=trigger_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend) @@ -2329,12 +369,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import 
sensor_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry +from argo_workflows.models.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2347,7 +386,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2355,57 +394,51 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = sensor_service_api.SensorServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | optional - only return entries for this sensor name. (optional) - trigger_name = "triggerName_example" # str | optional - only return entries for this trigger. (optional) - grep = "grep_example" # str | option - only return entries where `msg` contains this regular expressions. (optional) - pod_log_options_container = "podLogOptions.container_example" # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. 
(optional) + api_instance = argo_workflows.SensorServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | optional - only return entries for this sensor name. (optional) + trigger_name = 'trigger_name_example' # str | optional - only return entries for this trigger. (optional) + grep = 'grep_example' # str | option - only return entries where `msg` contains this regular expressions. (optional) + pod_log_options_container = 'pod_log_options_container_example' # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. (optional) pod_log_options_follow = True # bool | Follow the log stream of the pod. Defaults to false. +optional. (optional) pod_log_options_previous = True # bool | Return previous terminated container logs. Defaults to false. +optional. (optional) - pod_log_options_since_seconds = "podLogOptions.sinceSeconds_example" # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. (optional) - pod_log_options_since_time_seconds = "podLogOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) - pod_log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) + pod_log_options_since_seconds = 'pod_log_options_since_seconds_example' # str | A relative time in seconds before the current time from which to show logs. 
If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. (optional) + pod_log_options_since_time_seconds = 'pod_log_options_since_time_seconds_example' # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) + pod_log_options_since_time_nanos = 56 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) pod_log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - pod_log_options_tail_lines = "podLogOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) - pod_log_options_limit_bytes = "podLogOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) + pod_log_options_tail_lines = 'pod_log_options_tail_lines_example' # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + pod_log_options_limit_bytes = 'pod_log_options_limit_bytes_example' # str | If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) pod_log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.sensors_logs(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling SensorServiceApi->sensors_logs: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.sensors_logs(namespace, name=name, trigger_name=trigger_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend) + print("The response of SensorServiceApi->sensors_logs:\n") pprint(api_response) 
- except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling SensorServiceApi->sensors_logs: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| optional - only return entries for this sensor name. | [optional] - **trigger_name** | **str**| optional - only return entries for this trigger. | [optional] - **grep** | **str**| option - only return entries where `msg` contains this regular expressions. | [optional] - **pod_log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] - **pod_log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] - **pod_log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] - **pod_log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] - **pod_log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] - **pod_log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. 
| [optional] - **pod_log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] - **pod_log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] - **pod_log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **namespace** | **str**| | + **name** | **str**| optional - only return entries for this sensor name. | [optional] + **trigger_name** | **str**| optional - only return entries for this trigger. | [optional] + **grep** | **str**| option - only return entries where `msg` contains this regular expressions. | [optional] + **pod_log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] + **pod_log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. 
| [optional] + **pod_log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] + **pod_log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] + **pod_log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] + **pod_log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] + **pod_log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] + **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **pod_log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] + **pod_log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. 
This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] ### Return type @@ -2420,7 +453,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -2440,13 +472,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import sensor_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor -from argo_workflows.model.sensor_update_sensor_request import SensorUpdateSensorRequest +from argo_workflows.models.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.models.sensor_update_sensor_request import SensorUpdateSensorRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -2459,7 +490,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -2467,1955 +498,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = sensor_service_api.SensorServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = SensorUpdateSensorRequest( - name="name_example", - namespace="namespace_example", - sensor=IoArgoprojEventsV1alpha1Sensor( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojEventsV1alpha1SensorSpec( - dependencies=[ - IoArgoprojEventsV1alpha1EventDependency( - event_name="event_name_example", - event_source_name="event_source_name_example", - filters=IoArgoprojEventsV1alpha1EventDependencyFilter( - context=IoArgoprojEventsV1alpha1EventContext( - datacontenttype="datacontenttype_example", - 
id="id_example", - source="source_example", - specversion="specversion_example", - subject="subject_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - type="type_example", - ), - data=[ - IoArgoprojEventsV1alpha1DataFilter( - comparator="comparator_example", - path="path_example", - template="template_example", - type="type_example", - value=[ - "value_example", - ], - ), - ], - data_logical_operator="data_logical_operator_example", - expr_logical_operator="expr_logical_operator_example", - exprs=[ - IoArgoprojEventsV1alpha1ExprFilter( - expr="expr_example", - fields=[ - IoArgoprojEventsV1alpha1PayloadField( - name="name_example", - path="path_example", - ), - ], - ), - ], - script="script_example", - time=IoArgoprojEventsV1alpha1TimeFilter( - start="start_example", - stop="stop_example", - ), - ), - filters_logical_operator="filters_logical_operator_example", - name="name_example", - transform=IoArgoprojEventsV1alpha1EventDependencyTransformer( - jq="jq_example", - script="script_example", - ), - ), - ], - error_on_failed_round=True, - event_bus_name="event_bus_name_example", - logging_fields={ - "key": "key_example", - }, - replicas=1, - revision_history_limit=1, - template=IoArgoprojEventsV1alpha1Template( - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - 
NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", 
- operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - 
host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - 
timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - 
sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metadata=IoArgoprojEventsV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - node_selector={ - "key": "key_example", - }, - priority=1, - priority_class_name="priority_class_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - 
path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - 
time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - 
revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - 
expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - triggers=[ - IoArgoprojEventsV1alpha1Trigger( - at_least_once=True, - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - 
dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - policy=IoArgoprojEventsV1alpha1TriggerPolicy( - k8s=IoArgoprojEventsV1alpha1K8SResourcePolicy( - backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - error_on_backoff_timeout=True, - labels={ - "key": "key_example", - }, - ), - status=IoArgoprojEventsV1alpha1StatusPolicy( - allow=[ - 1, - ], - ), - ), - rate_limit=IoArgoprojEventsV1alpha1RateLimit( - requests_per_unit=1, - unit="unit_example", - ), - retry_strategy=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - template=IoArgoprojEventsV1alpha1TriggerTemplate( - argo_workflow=IoArgoprojEventsV1alpha1ArgoWorkflowTrigger( - args=[ - "args_example", - ], - operation="operation_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - source=IoArgoprojEventsV1alpha1ArtifactLocation( - configmap=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - file=IoArgoprojEventsV1alpha1FileArtifact( - path="path_example", - ), - git=IoArgoprojEventsV1alpha1GitArtifact( - 
branch="branch_example", - clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - file_path="file_path_example", - insecure_ignore_host_key=True, - ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( - name="name_example", - urls=[ - "urls_example", - ], - ), - ssh_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tag="tag_example", - url="url_example", - ), - inline="inline_example", - resource=IoArgoprojEventsV1alpha1Resource( - value='YQ==', - ), - s3=IoArgoprojEventsV1alpha1S3Artifact( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( - key="key_example", - name="name_example", - ), - ca_certificate=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1S3Filter( - prefix="prefix_example", - suffix="suffix_example", - ), - insecure=True, - metadata={ - "key": "key_example", - }, - region="region_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - url=IoArgoprojEventsV1alpha1URLArtifact( - path="path_example", - verify_cert=True, - ), - ), - ), - aws_lambda=IoArgoprojEventsV1alpha1AWSLambdaTrigger( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - function_name="function_name_example", - invocation_type="invocation_type_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - 
context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - region="region_example", - role_arn="role_arn_example", - secret_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure_event_hubs=IoArgoprojEventsV1alpha1AzureEventHubsTrigger( - fqdn="fqdn_example", - hub_name="hub_name_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - shared_access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - shared_access_key_name=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - azure_service_bus=IoArgoprojEventsV1alpha1AzureServiceBusTrigger( - connection_string=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - queue_name="queue_name_example", - subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic_name="topic_name_example", - ), - conditions="conditions_example", - conditions_reset=[ - IoArgoprojEventsV1alpha1ConditionsResetCriteria( - by_time=IoArgoprojEventsV1alpha1ConditionsResetByTime( - cron="cron_example", - timezone="timezone_example", - ), - ), - ], - custom=IoArgoprojEventsV1alpha1CustomTrigger( - cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - parameters=[ - 
IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - secure=True, - server_name_override="server_name_override_example", - server_url="server_url_example", - spec={ - "key": "key_example", - }, - ), - email=IoArgoprojEventsV1alpha1EmailTrigger( - body="body_example", - _from="_from_example", - host="host_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - port=1, - smtp_password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - subject="subject_example", - to=[ - "to_example", - ], - username="username_example", - ), - http=IoArgoprojEventsV1alpha1HTTPTrigger( - basic_auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - headers={ - "key": "key_example", - }, - method="method_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - secure_headers=[ - IoArgoprojEventsV1alpha1SecureHeader( - name="name_example", - value_from=IoArgoprojEventsV1alpha1ValueFromSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - timeout="timeout_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", - ), - k8s=IoArgoprojEventsV1alpha1StandardK8STrigger( - live_object=True, - operation="operation_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - 
src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - patch_strategy="patch_strategy_example", - source=IoArgoprojEventsV1alpha1ArtifactLocation( - configmap=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - file=IoArgoprojEventsV1alpha1FileArtifact( - path="path_example", - ), - git=IoArgoprojEventsV1alpha1GitArtifact( - branch="branch_example", - clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - file_path="file_path_example", - insecure_ignore_host_key=True, - ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( - name="name_example", - urls=[ - "urls_example", - ], - ), - ssh_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tag="tag_example", - url="url_example", - ), - inline="inline_example", - resource=IoArgoprojEventsV1alpha1Resource( - value='YQ==', - ), - s3=IoArgoprojEventsV1alpha1S3Artifact( - access_key=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( - key="key_example", - name="name_example", - ), - ca_certificate=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint="endpoint_example", - events=[ - "events_example", - ], - filter=IoArgoprojEventsV1alpha1S3Filter( - prefix="prefix_example", - suffix="suffix_example", - ), - insecure=True, - metadata={ - "key": "key_example", - }, - region="region_example", - secret_key=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - url=IoArgoprojEventsV1alpha1URLArtifact( - path="path_example", - verify_cert=True, - ), - ), - ), - kafka=IoArgoprojEventsV1alpha1KafkaTrigger( - compress=True, - flush_frequency=1, - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - partition=1, - partitioning_key="partitioning_key_example", - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - required_acks=1, - sasl=IoArgoprojEventsV1alpha1SASLConfig( - mechanism="mechanism_example", - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - user_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - schema_registry=IoArgoprojEventsV1alpha1SchemaRegistryConfig( - auth=IoArgoprojEventsV1alpha1BasicAuth( - password=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - schema_id=1, - url="url_example", - ), - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - topic="topic_example", - url="url_example", - version="version_example", - ), - log=IoArgoprojEventsV1alpha1LogTrigger( - interval_seconds="interval_seconds_example", - ), - name="name_example", - nats=IoArgoprojEventsV1alpha1NATSTrigger( - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - url="url_example", - ), - open_whisk=IoArgoprojEventsV1alpha1OpenWhiskTrigger( - action_name="action_name_example", - auth_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - host="host_example", 
- namespace="namespace_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - version="version_example", - ), - pulsar=IoArgoprojEventsV1alpha1PulsarTrigger( - auth_athenz_params={ - "key": "key_example", - }, - auth_athenz_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - auth_token_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( - int64_val="int64_val_example", - str_val="str_val_example", - type="type_example", - ), - factor=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - jitter=IoArgoprojEventsV1alpha1Amount( - value='YQ==', - ), - steps=1, - ), - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - 
], - payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - tls=IoArgoprojEventsV1alpha1TLSConfig( - ca_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - insecure_skip_verify=True, - ), - tls_allow_insecure_connection=True, - tls_trust_certs_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - tls_validate_hostname=True, - topic="topic_example", - url="url_example", - ), - slack=IoArgoprojEventsV1alpha1SlackTrigger( - attachments="attachments_example", - blocks="blocks_example", - channel="channel_example", - message="message_example", - parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( - dest="dest_example", - operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( - context_key="context_key_example", - context_template="context_template_example", - data_key="data_key_example", - data_template="data_template_example", - dependency_name="dependency_name_example", - use_raw_data=True, - value="value_example", - ), - ), - ], - sender=IoArgoprojEventsV1alpha1SlackSender( - icon="icon_example", - username="username_example", - ), - slack_token=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - thread=IoArgoprojEventsV1alpha1SlackThread( - broadcast_message_to_channel=True, - message_aggregation_key="message_aggregation_key_example", - ), 
- ), - ), - ), - ], - ), - status=IoArgoprojEventsV1alpha1SensorStatus( - status=IoArgoprojEventsV1alpha1Status( - conditions=[ - IoArgoprojEventsV1alpha1Condition( - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="type_example", - ), - ], - ), - ), - ), - ) # SensorUpdateSensorRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.SensorServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.SensorUpdateSensorRequest() # SensorUpdateSensorRequest | + try: api_response = api_instance.update_sensor(namespace, name, body) + print("The response of SensorServiceApi->update_sensor:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling SensorServiceApi->update_sensor: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**SensorUpdateSensorRequest**](SensorUpdateSensorRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**SensorUpdateSensorRequest**](SensorUpdateSensorRequest.md)| | ### Return type @@ -4430,7 +535,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -4441,7 +545,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **watch_sensors** -> StreamResultOfSensorSensorWatchEvent watch_sensors(namespace) +> StreamResultOfSensorSensorWatchEvent watch_sensors(namespace, 
list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -4450,12 +554,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import sensor_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent +from argo_workflows.models.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -4468,7 +571,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -4476,49 +579,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = sensor_service_api.SensorServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.SensorServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
(optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.watch_sensors(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling SensorServiceApi->watch_sensors: %s\n" % e) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. 
This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.watch_sensors(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of SensorServiceApi->watch_sensors:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling SensorServiceApi->watch_sensors: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. 
+optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
| [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] ### Return type @@ -4533,7 +630,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/SensorUpdateSensorRequest.md b/sdks/python/client/docs/SensorUpdateSensorRequest.md index 4912ac800cfb..e2f2d21f4c37 100644 --- a/sdks/python/client/docs/SensorUpdateSensorRequest.md +++ b/sdks/python/client/docs/SensorUpdateSensorRequest.md @@ -2,13 +2,30 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] **sensor** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.sensor_update_sensor_request import SensorUpdateSensorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of SensorUpdateSensorRequest from a JSON string +sensor_update_sensor_request_instance = SensorUpdateSensorRequest.from_json(json) +# print the JSON string representation of the object +print(SensorUpdateSensorRequest.to_json()) + +# convert the object into a dict 
+sensor_update_sensor_request_dict = sensor_update_sensor_request_instance.to_dict() +# create an instance of SensorUpdateSensorRequest from a dict +sensor_update_sensor_request_form_dict = sensor_update_sensor_request.from_dict(sensor_update_sensor_request_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ServiceAccountTokenProjection.md b/sdks/python/client/docs/ServiceAccountTokenProjection.md index 8a5f503094f8..3fb2d2787882 100644 --- a/sdks/python/client/docs/ServiceAccountTokenProjection.md +++ b/sdks/python/client/docs/ServiceAccountTokenProjection.md @@ -3,13 +3,30 @@ ServiceAccountTokenProjection represents a projected service account token volume. This projection can be used to insert a service account token into the pods runtime filesystem for use against APIs (Kubernetes API Server or otherwise). ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**path** | **str** | Path is the path relative to the mount point of the file to project the token into. | **audience** | **str** | Audience is the intended audience of the token. A recipient of a token must identify itself with an identifier specified in the audience of the token, and otherwise should reject the token. The audience defaults to the identifier of the apiserver. | [optional] **expiration_seconds** | **int** | ExpirationSeconds is the requested duration of validity of the service account token. As the token approaches expiration, the kubelet volume plugin will proactively rotate the service account token. The kubelet will start trying to rotate the token if the token is older than 80 percent of its time to live or if the token is older than 24 hours.Defaults to 1 hour and must be at least 10 minutes. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**path** | **str** | Path is the path relative to the mount point of the file to project the token into. | + +## Example + +```python +from argo_workflows.models.service_account_token_projection import ServiceAccountTokenProjection + +# TODO update the JSON string below +json = "{}" +# create an instance of ServiceAccountTokenProjection from a JSON string +service_account_token_projection_instance = ServiceAccountTokenProjection.from_json(json) +# print the JSON string representation of the object +print(ServiceAccountTokenProjection.to_json()) +# convert the object into a dict +service_account_token_projection_dict = service_account_token_projection_instance.to_dict() +# create an instance of ServiceAccountTokenProjection from a dict +service_account_token_projection_form_dict = service_account_token_projection.from_dict(service_account_token_projection_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ServicePort.md b/sdks/python/client/docs/ServicePort.md index 77b79f348915..af8fdf7f2a76 100644 --- a/sdks/python/client/docs/ServicePort.md +++ b/sdks/python/client/docs/ServicePort.md @@ -3,16 +3,33 @@ ServicePort contains information on service's port. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**port** | **int** | The port that will be exposed by this service. | **app_protocol** | **str** | The application protocol for this port. This field follows standard Kubernetes label syntax. Un-prefixed names are reserved for IANA standard service names (as per RFC-6335 and http://www.iana.org/assignments/service-names). 
Non-standard protocols should use prefixed names such as mycompany.com/my-custom-protocol. | [optional] **name** | **str** | The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. When considering the endpoints for a Service, this must match the 'name' field in the EndpointPort. Optional if only one ServicePort is defined on this service. | [optional] **node_port** | **int** | The port on each node on which this service is exposed when type is NodePort or LoadBalancer. Usually assigned by the system. If a value is specified, in-range, and not in use it will be used, otherwise the operation will fail. If not specified, a port will be allocated if this Service requires one. If this field is specified when creating a Service which does not need it, creation will fail. This field will be wiped when updating a Service to no longer need it (e.g. changing type from NodePort to ClusterIP). More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport | [optional] +**port** | **int** | The port that will be exposed by this service. | **protocol** | **str** | The IP protocol for this port. Supports \"TCP\", \"UDP\", and \"SCTP\". Default is TCP. Possible enum values: - `\"SCTP\"` is the SCTP protocol. - `\"TCP\"` is the TCP protocol. - `\"UDP\"` is the UDP protocol. 
| [optional] **target_port** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.service_port import ServicePort + +# TODO update the JSON string below +json = "{}" +# create an instance of ServicePort from a JSON string +service_port_instance = ServicePort.from_json(json) +# print the JSON string representation of the object +print(ServicePort.to_json()) + +# convert the object into a dict +service_port_dict = service_port_instance.to_dict() +# create an instance of ServicePort from a dict +service_port_form_dict = service_port.from_dict(service_port_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StatusCause.md b/sdks/python/client/docs/StatusCause.md index 3f3260473e13..8a484a4b9843 100644 --- a/sdks/python/client/docs/StatusCause.md +++ b/sdks/python/client/docs/StatusCause.md @@ -3,13 +3,30 @@ StatusCause provides more information about an api.Status failure, including cases when multiple errors are encountered. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **field** | **str** | The field of the resource that has caused this error, as named by its JSON serialization. May include dot and postfix notation for nested attributes. Arrays are zero-indexed. Fields may appear more than once in an array of causes due to fields having multiple errors. Optional. Examples: \"name\" - the field \"name\" on the current resource \"items[0].name\" - the field \"name\" on the first array entry in \"items\" | [optional] **message** | **str** | A human-readable description of the cause of the error. This field may be presented as-is to a reader. 
| [optional] **reason** | **str** | A machine-readable description of the cause of the error. If this value is empty there is no information available. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.status_cause import StatusCause + +# TODO update the JSON string below +json = "{}" +# create an instance of StatusCause from a JSON string +status_cause_instance = StatusCause.from_json(json) +# print the JSON string representation of the object +print(StatusCause.to_json()) + +# convert the object into a dict +status_cause_dict = status_cause_instance.to_dict() +# create an instance of StatusCause from a dict +status_cause_form_dict = status_cause.from_dict(status_cause_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StorageOSVolumeSource.md b/sdks/python/client/docs/StorageOSVolumeSource.md index 25fb14c26ee3..51652aa23c08 100644 --- a/sdks/python/client/docs/StorageOSVolumeSource.md +++ b/sdks/python/client/docs/StorageOSVolumeSource.md @@ -3,6 +3,7 @@ Represents a StorageOS persistent volume resource. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **secret_ref** | [**LocalObjectReference**](LocalObjectReference.md) | | [optional] **volume_name** | **str** | VolumeName is the human-readable name of the StorageOS volume. Volume names are only unique within a namespace. 
| [optional] **volume_namespace** | **str** | VolumeNamespace specifies the scope of the volume within StorageOS. If no namespace is specified then the Pod's namespace will be used. This allows the Kubernetes name scoping to be mirrored within StorageOS for tighter integration. Set VolumeName to any name to override the default behaviour. Set to \"default\" if you are not using namespaces within StorageOS. Namespaces that do not pre-exist within StorageOS will be created. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.storage_os_volume_source import StorageOSVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of StorageOSVolumeSource from a JSON string +storage_os_volume_source_instance = StorageOSVolumeSource.from_json(json) +# print the JSON string representation of the object +print(StorageOSVolumeSource.to_json()) + +# convert the object into a dict +storage_os_volume_source_dict = storage_os_volume_source_instance.to_dict() +# create an instance of StorageOSVolumeSource from a dict +storage_os_volume_source_form_dict = storage_os_volume_source.from_dict(storage_os_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StreamResultOfEvent.md b/sdks/python/client/docs/StreamResultOfEvent.md index 889ec00f599b..f6a063ee75c5 100644 --- a/sdks/python/client/docs/StreamResultOfEvent.md +++ b/sdks/python/client/docs/StreamResultOfEvent.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] **result** | 
[**Event**](Event.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.stream_result_of_event import StreamResultOfEvent + +# TODO update the JSON string below +json = "{}" +# create an instance of StreamResultOfEvent from a JSON string +stream_result_of_event_instance = StreamResultOfEvent.from_json(json) +# print the JSON string representation of the object +print(StreamResultOfEvent.to_json()) + +# convert the object into a dict +stream_result_of_event_dict = stream_result_of_event_instance.to_dict() +# create an instance of StreamResultOfEvent from a dict +stream_result_of_event_form_dict = stream_result_of_event.from_dict(stream_result_of_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StreamResultOfEventsourceEventSourceWatchEvent.md b/sdks/python/client/docs/StreamResultOfEventsourceEventSourceWatchEvent.md index 58be17d2d89f..39a1b6e6f8f4 100644 --- a/sdks/python/client/docs/StreamResultOfEventsourceEventSourceWatchEvent.md +++ b/sdks/python/client/docs/StreamResultOfEventsourceEventSourceWatchEvent.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] **result** | [**EventsourceEventSourceWatchEvent**](EventsourceEventSourceWatchEvent.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.stream_result_of_eventsource_event_source_watch_event import 
StreamResultOfEventsourceEventSourceWatchEvent + +# TODO update the JSON string below +json = "{}" +# create an instance of StreamResultOfEventsourceEventSourceWatchEvent from a JSON string +stream_result_of_eventsource_event_source_watch_event_instance = StreamResultOfEventsourceEventSourceWatchEvent.from_json(json) +# print the JSON string representation of the object +print(StreamResultOfEventsourceEventSourceWatchEvent.to_json()) + +# convert the object into a dict +stream_result_of_eventsource_event_source_watch_event_dict = stream_result_of_eventsource_event_source_watch_event_instance.to_dict() +# create an instance of StreamResultOfEventsourceEventSourceWatchEvent from a dict +stream_result_of_eventsource_event_source_watch_event_form_dict = stream_result_of_eventsource_event_source_watch_event.from_dict(stream_result_of_eventsource_event_source_watch_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StreamResultOfEventsourceLogEntry.md b/sdks/python/client/docs/StreamResultOfEventsourceLogEntry.md index 4144da116a70..09aadb3acd3c 100644 --- a/sdks/python/client/docs/StreamResultOfEventsourceLogEntry.md +++ b/sdks/python/client/docs/StreamResultOfEventsourceLogEntry.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] **result** | [**EventsourceLogEntry**](EventsourceLogEntry.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry + +# TODO update the JSON 
string below +json = "{}" +# create an instance of StreamResultOfEventsourceLogEntry from a JSON string +stream_result_of_eventsource_log_entry_instance = StreamResultOfEventsourceLogEntry.from_json(json) +# print the JSON string representation of the object +print(StreamResultOfEventsourceLogEntry.to_json()) + +# convert the object into a dict +stream_result_of_eventsource_log_entry_dict = stream_result_of_eventsource_log_entry_instance.to_dict() +# create an instance of StreamResultOfEventsourceLogEntry from a dict +stream_result_of_eventsource_log_entry_form_dict = stream_result_of_eventsource_log_entry.from_dict(stream_result_of_eventsource_log_entry_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry.md b/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry.md index ea7be969ee20..43b0586813a6 100644 --- a/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry.md +++ b/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] **result** | [**IoArgoprojWorkflowV1alpha1LogEntry**](IoArgoprojWorkflowV1alpha1LogEntry.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of 
StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry from a JSON string +stream_result_of_io_argoproj_workflow_v1alpha1_log_entry_instance = StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry.from_json(json) +# print the JSON string representation of the object +print(StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry.to_json()) + +# convert the object into a dict +stream_result_of_io_argoproj_workflow_v1alpha1_log_entry_dict = stream_result_of_io_argoproj_workflow_v1alpha1_log_entry_instance.to_dict() +# create an instance of StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry from a dict +stream_result_of_io_argoproj_workflow_v1alpha1_log_entry_form_dict = stream_result_of_io_argoproj_workflow_v1alpha1_log_entry.from_dict(stream_result_of_io_argoproj_workflow_v1alpha1_log_entry_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md b/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md index e2baed5dda4d..615f729b6290 100644 --- a/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md +++ b/sdks/python/client/docs/StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] **result** | [**IoArgoprojWorkflowV1alpha1WorkflowWatchEvent**](IoArgoprojWorkflowV1alpha1WorkflowWatchEvent.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from 
argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent + +# TODO update the JSON string below +json = "{}" +# create an instance of StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a JSON string +stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event_instance = StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent.from_json(json) +# print the JSON string representation of the object +print(StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent.to_json()) + +# convert the object into a dict +stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event_dict = stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event_instance.to_dict() +# create an instance of StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent from a dict +stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event_form_dict = stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event.from_dict(stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StreamResultOfSensorLogEntry.md b/sdks/python/client/docs/StreamResultOfSensorLogEntry.md index 6deca2b1de80..4d82fb05d28f 100644 --- a/sdks/python/client/docs/StreamResultOfSensorLogEntry.md +++ b/sdks/python/client/docs/StreamResultOfSensorLogEntry.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] **result** | [**SensorLogEntry**](SensorLogEntry.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but 
the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of StreamResultOfSensorLogEntry from a JSON string +stream_result_of_sensor_log_entry_instance = StreamResultOfSensorLogEntry.from_json(json) +# print the JSON string representation of the object +print(StreamResultOfSensorLogEntry.to_json()) + +# convert the object into a dict +stream_result_of_sensor_log_entry_dict = stream_result_of_sensor_log_entry_instance.to_dict() +# create an instance of StreamResultOfSensorLogEntry from a dict +stream_result_of_sensor_log_entry_form_dict = stream_result_of_sensor_log_entry.from_dict(stream_result_of_sensor_log_entry_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/StreamResultOfSensorSensorWatchEvent.md b/sdks/python/client/docs/StreamResultOfSensorSensorWatchEvent.md index d2e5a752a98d..52b032020c29 100644 --- a/sdks/python/client/docs/StreamResultOfSensorSensorWatchEvent.md +++ b/sdks/python/client/docs/StreamResultOfSensorSensorWatchEvent.md @@ -2,12 +2,29 @@ ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **error** | [**GrpcGatewayRuntimeStreamError**](GrpcGatewayRuntimeStreamError.md) | | [optional] **result** | [**SensorSensorWatchEvent**](SensorSensorWatchEvent.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.stream_result_of_sensor_sensor_watch_event import StreamResultOfSensorSensorWatchEvent + +# TODO update the JSON string below +json = "{}" +# create an instance 
of StreamResultOfSensorSensorWatchEvent from a JSON string +stream_result_of_sensor_sensor_watch_event_instance = StreamResultOfSensorSensorWatchEvent.from_json(json) +# print the JSON string representation of the object +print(StreamResultOfSensorSensorWatchEvent.to_json()) + +# convert the object into a dict +stream_result_of_sensor_sensor_watch_event_dict = stream_result_of_sensor_sensor_watch_event_instance.to_dict() +# create an instance of StreamResultOfSensorSensorWatchEvent from a dict +stream_result_of_sensor_sensor_watch_event_form_dict = stream_result_of_sensor_sensor_watch_event.from_dict(stream_result_of_sensor_sensor_watch_event_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Sysctl.md b/sdks/python/client/docs/Sysctl.md index 54f87b41fe39..c37a4944e233 100644 --- a/sdks/python/client/docs/Sysctl.md +++ b/sdks/python/client/docs/Sysctl.md @@ -3,12 +3,29 @@ Sysctl defines a kernel parameter to be set ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name of a property to set | **value** | **str** | Value of a property to set | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.sysctl import Sysctl + +# TODO update the JSON string below +json = "{}" +# create an instance of Sysctl from a JSON string +sysctl_instance = Sysctl.from_json(json) +# print the JSON string representation of the object +print(Sysctl.to_json()) + +# convert the object into a dict +sysctl_dict = sysctl_instance.to_dict() +# create an instance of Sysctl from a dict +sysctl_form_dict = sysctl.from_dict(sysctl_dict) +``` [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/TCPSocketAction.md b/sdks/python/client/docs/TCPSocketAction.md index afe372e933f1..28aaad6d9787 100644 --- a/sdks/python/client/docs/TCPSocketAction.md +++ b/sdks/python/client/docs/TCPSocketAction.md @@ -3,12 +3,29 @@ TCPSocketAction describes an action based on opening a socket ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**port** | **str** | | **host** | **str** | Optional: Host name to connect to, defaults to the pod IP. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**port** | **str** | | + +## Example + +```python +from argo_workflows.models.tcp_socket_action import TCPSocketAction + +# TODO update the JSON string below +json = "{}" +# create an instance of TCPSocketAction from a JSON string +tcp_socket_action_instance = TCPSocketAction.from_json(json) +# print the JSON string representation of the object +print(TCPSocketAction.to_json()) +# convert the object into a dict +tcp_socket_action_dict = tcp_socket_action_instance.to_dict() +# create an instance of TCPSocketAction from a dict +tcp_socket_action_form_dict = tcp_socket_action.from_dict(tcp_socket_action_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Toleration.md b/sdks/python/client/docs/Toleration.md index 2bf08a389930..48a36aaf6768 100644 --- a/sdks/python/client/docs/Toleration.md +++ b/sdks/python/client/docs/Toleration.md @@ -3,6 +3,7 @@ The pod this Toleration is attached to tolerates any taint that matches the triple using the matching operator . 
## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **effect** | **str** | Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. Possible enum values: - `\"NoExecute\"` Evict any already-running pods that do not tolerate the taint. Currently enforced by NodeController. - `\"NoSchedule\"` Do not allow new pods to schedule onto the node unless they tolerate the taint, but allow all pods submitted to Kubelet without going through the scheduler to start, and allow all already-running pods to continue running. Enforced by the scheduler. - `\"PreferNoSchedule\"` Like TaintEffectNoSchedule, but the scheduler tries not to schedule new pods onto the node, rather than prohibiting new pods from scheduling onto the node entirely. Enforced by the scheduler. | [optional] @@ -10,8 +11,24 @@ Name | Type | Description | Notes **operator** | **str** | Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. Possible enum values: - `\"Equal\"` - `\"Exists\"` | [optional] **toleration_seconds** | **int** | TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system. | [optional] **value** | **str** | Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.toleration import Toleration + +# TODO update the JSON string below +json = "{}" +# create an instance of Toleration from a JSON string +toleration_instance = Toleration.from_json(json) +# print the JSON string representation of the object +print(Toleration.to_json()) + +# convert the object into a dict +toleration_dict = toleration_instance.to_dict() +# create an instance of Toleration from a dict +toleration_form_dict = toleration.from_dict(toleration_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/TypedLocalObjectReference.md b/sdks/python/client/docs/TypedLocalObjectReference.md index 8740086ee0e2..7ad21301dcd4 100644 --- a/sdks/python/client/docs/TypedLocalObjectReference.md +++ b/sdks/python/client/docs/TypedLocalObjectReference.md @@ -3,13 +3,30 @@ TypedLocalObjectReference contains enough information to let you locate the typed referenced object inside the same namespace. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**api_group** | **str** | APIGroup is the group for the resource being referenced. If APIGroup is not specified, the specified Kind must be in the core API group. For any other third-party types, APIGroup is required. | [optional] **kind** | **str** | Kind is the type of resource being referenced | **name** | **str** | Name is the name of resource being referenced | -**api_group** | **str** | APIGroup is the group for the resource being referenced. If APIGroup is not specified, the specified Kind must be in the core API group. For any other third-party types, APIGroup is required. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.typed_local_object_reference import TypedLocalObjectReference + +# TODO update the JSON string below +json = "{}" +# create an instance of TypedLocalObjectReference from a JSON string +typed_local_object_reference_instance = TypedLocalObjectReference.from_json(json) +# print the JSON string representation of the object +print(TypedLocalObjectReference.to_json()) + +# convert the object into a dict +typed_local_object_reference_dict = typed_local_object_reference_instance.to_dict() +# create an instance of TypedLocalObjectReference from a dict +typed_local_object_reference_form_dict = typed_local_object_reference.from_dict(typed_local_object_reference_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/Volume.md b/sdks/python/client/docs/Volume.md index 476da1a7629b..a5d357bcbd6c 100644 --- a/sdks/python/client/docs/Volume.md +++ b/sdks/python/client/docs/Volume.md @@ -3,9 +3,9 @@ Volume represents a named volume in a pod that may be accessed by any container in the pod. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | Volume's name. Must be a DNS_LABEL and unique within the pod. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | **aws_elastic_block_store** | [**AWSElasticBlockStoreVolumeSource**](AWSElasticBlockStoreVolumeSource.md) | | [optional] **azure_disk** | [**AzureDiskVolumeSource**](AzureDiskVolumeSource.md) | | [optional] **azure_file** | [**AzureFileVolumeSource**](AzureFileVolumeSource.md) | | [optional] @@ -24,6 +24,7 @@ Name | Type | Description | Notes **glusterfs** | [**GlusterfsVolumeSource**](GlusterfsVolumeSource.md) | | [optional] **host_path** | [**HostPathVolumeSource**](HostPathVolumeSource.md) | | [optional] **iscsi** | [**ISCSIVolumeSource**](ISCSIVolumeSource.md) | | [optional] +**name** | **str** | Volume's name. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | **nfs** | [**NFSVolumeSource**](NFSVolumeSource.md) | | [optional] **persistent_volume_claim** | [**PersistentVolumeClaimVolumeSource**](PersistentVolumeClaimVolumeSource.md) | | [optional] **photon_persistent_disk** | [**PhotonPersistentDiskVolumeSource**](PhotonPersistentDiskVolumeSource.md) | | [optional] @@ -35,8 +36,24 @@ Name | Type | Description | Notes **secret** | [**SecretVolumeSource**](SecretVolumeSource.md) | | [optional] **storageos** | [**StorageOSVolumeSource**](StorageOSVolumeSource.md) | | [optional] **vsphere_volume** | [**VsphereVirtualDiskVolumeSource**](VsphereVirtualDiskVolumeSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.volume import Volume + +# TODO update the JSON string below +json = "{}" +# create an instance of Volume from a JSON string +volume_instance = Volume.from_json(json) +# print the JSON string representation of the object +print(Volume.to_json()) + +# convert the object into a dict 
+volume_dict = volume_instance.to_dict() +# create an instance of Volume from a dict +volume_form_dict = volume.from_dict(volume_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/VolumeDevice.md b/sdks/python/client/docs/VolumeDevice.md index 25ac6e26fb77..ba102ca79ea5 100644 --- a/sdks/python/client/docs/VolumeDevice.md +++ b/sdks/python/client/docs/VolumeDevice.md @@ -3,12 +3,29 @@ volumeDevice describes a mapping of a raw block device within a container. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **device_path** | **str** | devicePath is the path inside of the container that the device will be mapped to. | **name** | **str** | name must match the name of a persistentVolumeClaim in the pod | -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.volume_device import VolumeDevice + +# TODO update the JSON string below +json = "{}" +# create an instance of VolumeDevice from a JSON string +volume_device_instance = VolumeDevice.from_json(json) +# print the JSON string representation of the object +print(VolumeDevice.to_json()) + +# convert the object into a dict +volume_device_dict = volume_device_instance.to_dict() +# create an instance of VolumeDevice from a dict +volume_device_form_dict = volume_device.from_dict(volume_device_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/VolumeMount.md b/sdks/python/client/docs/VolumeMount.md index 5c547a6068b9..cebac0fe37ee 100644 --- a/sdks/python/client/docs/VolumeMount.md +++ 
b/sdks/python/client/docs/VolumeMount.md @@ -3,16 +3,33 @@ VolumeMount describes a mounting of a Volume within a container. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **mount_path** | **str** | Path within the container at which the volume should be mounted. Must not contain ':'. | -**name** | **str** | This must match the Name of a Volume. | **mount_propagation** | **str** | mountPropagation determines how mounts are propagated from the host to container and the other way around. When not set, MountPropagationNone is used. This field is beta in 1.10. | [optional] +**name** | **str** | This must match the Name of a Volume. | **read_only** | **bool** | Mounted read-only if true, read-write otherwise (false or unspecified). Defaults to false. | [optional] **sub_path** | **str** | Path within the volume from which the container's volume should be mounted. Defaults to \"\" (volume's root). | [optional] **sub_path_expr** | **str** | Expanded path within the volume from which the container's volume should be mounted. Behaves similarly to SubPath but environment variable references $(VAR_NAME) are expanded using the container's environment. Defaults to \"\" (volume's root). SubPathExpr and SubPath are mutually exclusive. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.volume_mount import VolumeMount + +# TODO update the JSON string below +json = "{}" +# create an instance of VolumeMount from a JSON string +volume_mount_instance = VolumeMount.from_json(json) +# print the JSON string representation of the object +print(VolumeMount.to_json()) + +# convert the object into a dict +volume_mount_dict = volume_mount_instance.to_dict() +# create an instance of VolumeMount from a dict +volume_mount_form_dict = volume_mount.from_dict(volume_mount_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/VolumeProjection.md b/sdks/python/client/docs/VolumeProjection.md index b09537446477..5aea4dbe718b 100644 --- a/sdks/python/client/docs/VolumeProjection.md +++ b/sdks/python/client/docs/VolumeProjection.md @@ -3,14 +3,31 @@ Projection that may be projected along with other supported volume types ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map** | [**ConfigMapProjection**](ConfigMapProjection.md) | | [optional] **downward_api** | [**DownwardAPIProjection**](DownwardAPIProjection.md) | | [optional] **secret** | [**SecretProjection**](SecretProjection.md) | | [optional] **service_account_token** | [**ServiceAccountTokenProjection**](ServiceAccountTokenProjection.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.volume_projection import VolumeProjection + +# TODO update the JSON string below +json = 
"{}" +# create an instance of VolumeProjection from a JSON string +volume_projection_instance = VolumeProjection.from_json(json) +# print the JSON string representation of the object +print(VolumeProjection.to_json()) + +# convert the object into a dict +volume_projection_dict = volume_projection_instance.to_dict() +# create an instance of VolumeProjection from a dict +volume_projection_form_dict = volume_projection.from_dict(volume_projection_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/VsphereVirtualDiskVolumeSource.md b/sdks/python/client/docs/VsphereVirtualDiskVolumeSource.md index 90e3495d723b..282ebfae45be 100644 --- a/sdks/python/client/docs/VsphereVirtualDiskVolumeSource.md +++ b/sdks/python/client/docs/VsphereVirtualDiskVolumeSource.md @@ -3,14 +3,31 @@ Represents a vSphere volume resource. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**volume_path** | **str** | Path that identifies vSphere volume vmdk | **fs_type** | **str** | Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. | [optional] **storage_policy_id** | **str** | Storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName. | [optional] **storage_policy_name** | **str** | Storage Policy Based Management (SPBM) profile name. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +**volume_path** | **str** | Path that identifies vSphere volume vmdk | + +## Example + +```python +from argo_workflows.models.vsphere_virtual_disk_volume_source import VsphereVirtualDiskVolumeSource + +# TODO update the JSON string below +json = "{}" +# create an instance of VsphereVirtualDiskVolumeSource from a JSON string +vsphere_virtual_disk_volume_source_instance = VsphereVirtualDiskVolumeSource.from_json(json) +# print the JSON string representation of the object +print(VsphereVirtualDiskVolumeSource.to_json()) +# convert the object into a dict +vsphere_virtual_disk_volume_source_dict = vsphere_virtual_disk_volume_source_instance.to_dict() +# create an instance of VsphereVirtualDiskVolumeSource from a dict +vsphere_virtual_disk_volume_source_form_dict = vsphere_virtual_disk_volume_source.from_dict(vsphere_virtual_disk_volume_source_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/WeightedPodAffinityTerm.md b/sdks/python/client/docs/WeightedPodAffinityTerm.md index 738e175aea96..ec18a51ff856 100644 --- a/sdks/python/client/docs/WeightedPodAffinityTerm.md +++ b/sdks/python/client/docs/WeightedPodAffinityTerm.md @@ -3,12 +3,29 @@ The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **pod_affinity_term** | [**PodAffinityTerm**](PodAffinityTerm.md) | | **weight** | **int** | weight associated with matching the corresponding podAffinityTerm, in the range 1-100. 
| -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.weighted_pod_affinity_term import WeightedPodAffinityTerm + +# TODO update the JSON string below +json = "{}" +# create an instance of WeightedPodAffinityTerm from a JSON string +weighted_pod_affinity_term_instance = WeightedPodAffinityTerm.from_json(json) +# print the JSON string representation of the object +print(WeightedPodAffinityTerm.to_json()) + +# convert the object into a dict +weighted_pod_affinity_term_dict = weighted_pod_affinity_term_instance.to_dict() +# create an instance of WeightedPodAffinityTerm from a dict +weighted_pod_affinity_term_form_dict = weighted_pod_affinity_term.from_dict(weighted_pod_affinity_term_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/WindowsSecurityContextOptions.md b/sdks/python/client/docs/WindowsSecurityContextOptions.md index 12399ae91004..390940c56007 100644 --- a/sdks/python/client/docs/WindowsSecurityContextOptions.md +++ b/sdks/python/client/docs/WindowsSecurityContextOptions.md @@ -3,14 +3,31 @@ WindowsSecurityContextOptions contain Windows-specific options and credentials. ## Properties + Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **gmsa_credential_spec** | **str** | GMSACredentialSpec is where the GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) inlines the contents of the GMSA credential spec named by the GMSACredentialSpecName field. | [optional] **gmsa_credential_spec_name** | **str** | GMSACredentialSpecName is the name of the GMSA credential spec to use. 
| [optional] **host_process** | **bool** | HostProcess determines if a container should be run as a 'Host Process' container. This field is alpha-level and will only be honored by components that enable the WindowsHostProcessContainers feature flag. Setting this field without the feature flag will result in errors when validating the Pod. All of a Pod's containers must have the same effective HostProcess value (it is not allowed to have a mix of HostProcess containers and non-HostProcess containers). In addition, if HostProcess is true then HostNetwork must also be set to true. | [optional] **run_as_user_name** | **str** | The UserName in Windows to run the entrypoint of the container process. Defaults to the user specified in image metadata if unspecified. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] +## Example + +```python +from argo_workflows.models.windows_security_context_options import WindowsSecurityContextOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of WindowsSecurityContextOptions from a JSON string +windows_security_context_options_instance = WindowsSecurityContextOptions.from_json(json) +# print the JSON string representation of the object +print(WindowsSecurityContextOptions.to_json()) + +# convert the object into a dict +windows_security_context_options_dict = windows_security_context_options_instance.to_dict() +# create an instance of WindowsSecurityContextOptions from a dict +windows_security_context_options_form_dict = windows_security_context_options.from_dict(windows_security_context_options_dict) +``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/WorkflowServiceApi.md b/sdks/python/client/docs/WorkflowServiceApi.md index 4296e4211333..44c63b6e94fa 100644 --- a/sdks/python/client/docs/WorkflowServiceApi.md +++ b/sdks/python/client/docs/WorkflowServiceApi.md @@ -33,13 +33,12 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_create_request import IoArgoprojWorkflowV1alpha1WorkflowCreateRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -52,7 +51,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -60,27315 +59,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowCreateRequest( - create_options=CreateOptions( - dry_run=[ - "dry_run_example", - ], - field_manager="field_manager_example", - field_validation="field_validation_example", - ), - instance_id="instance_id_example", - namespace="namespace_example", - server_dry_run=True, - workflow=IoArgoprojWorkflowV1alpha1Workflow( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - 
affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( 
- pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - 
value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - 
scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - 
match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - 
], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, 
- ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( 
- host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - 
port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - 
zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - 
json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - 
name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - 
histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - 
), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - 
sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - 
), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - 
divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - 
host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - 
template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - 
fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - 
block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - 
type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - 
user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - 
data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": 
"key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": 
"key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - 
portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - 
key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - status=IoArgoprojWorkflowV1alpha1WorkflowStatus( - artifact_gc_status=IoArgoprojWorkflowV1alpha1ArtGCStatus( - not_specified=True, - pods_recouped={ - "key": True, - }, - strategies_processed={ - "key": True, - }, - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus( - artifact_repository=IoArgoprojWorkflowV1alpha1ArtifactRepository( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository( - key_format="key_format_example", - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo_url="repo_url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifactRepository( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob_name_format="blob_name_format_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifactRepository( - bucket="bucket_example", - key_format="key_format_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifactRepository( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path_format="path_format_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifactRepository( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key_format="key_format_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - s3=IoArgoprojWorkflowV1alpha1S3ArtifactRepository( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key_format="key_format_example", - key_prefix="key_prefix_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - config_map="config_map_example", - default=True, - key="key_example", - namespace="namespace_example", - ), - compressed_nodes="compressed_nodes_example", - conditions=[ - IoArgoprojWorkflowV1alpha1Condition( - message="message_example", - status="status_example", - type="type_example", - ), - ], - estimated_duration=1, - finished_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - nodes={ - "key": IoArgoprojWorkflowV1alpha1NodeStatus( - boundary_id="boundary_id_example", - children=[ - "children_example", - ], - daemoned=True, - display_name="display_name_example", - estimated_duration=1, - finished_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - host_node_name="host_node_name_example", - id="id_example", - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - 
auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - 
s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoization_status=IoArgoprojWorkflowV1alpha1MemoizationStatus( - cache_name="cache_name_example", - hit=True, - key="key_example", - ), - message="message_example", - name="name_example", - node_flag=IoArgoprojWorkflowV1alpha1NodeFlag( - hooked=True, - retried=True, - ), - outbound_nodes=[ - "outbound_nodes_example", - ], - 
outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, 
- oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - phase="phase_example", - pod_ip="pod_ip_example", - progress="progress_example", - resources_duration={ - "key": 1, - }, - started_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - synchronization_status=IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus( - waiting="waiting_example", - ), - template_name="template_name_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - template_scope="template_scope_example", - type="type_example", - ), - }, - offload_node_status_version="offload_node_status_version_example", - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", 
- optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - persistent_volume_claims=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - 
cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - 
operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - 
directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - 
service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - phase="phase_example", - progress="progress_example", - resources_duration={ - "key": 1, - }, - started_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - stored_templates={ - "key": IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - 
preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - 
object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), 
- ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], 
- drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - 
"command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - 
HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - 
run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - 
), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", 
- optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - 
key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - 
"scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), 
- ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - 
success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - 
deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - 
script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - 
host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", 
- host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - 
run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), 
- failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": 
IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), 
- mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - 
caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - 
deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - 
secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - 
path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - 
), - ), - ], - ), - }, - stored_workflow_template_spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - 
match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - 
compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - 
create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - 
jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - 
IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - 
limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - 
data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], 
- ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - 
match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", 
- optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - 
type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - 
ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - 
port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - 
krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - 
path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - 
arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, 
- oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": 
"key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": 
"key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - 
portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - 
key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - synchronization=IoArgoprojWorkflowV1alpha1SynchronizationStatus( - mutex=IoArgoprojWorkflowV1alpha1MutexStatus( - holding=[ - IoArgoprojWorkflowV1alpha1MutexHolding( - holder="holder_example", - mutex="mutex_example", - ), - ], - waiting=[ - IoArgoprojWorkflowV1alpha1MutexHolding( - holder="holder_example", - mutex="mutex_example", - ), - ], - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreStatus( - holding=[ - IoArgoprojWorkflowV1alpha1SemaphoreHolding( - holders=[ - "holders_example", - ], - semaphore="semaphore_example", - ), - ], - waiting=[ - IoArgoprojWorkflowV1alpha1SemaphoreHolding( - holders=[ - "holders_example", - ], - semaphore="semaphore_example", - ), - ], - ), - ), - task_results_completion_status={ - "key": True, - }, - ), - ), - ) # IoArgoprojWorkflowV1alpha1WorkflowCreateRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + body = 
argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowCreateRequest() # IoArgoprojWorkflowV1alpha1WorkflowCreateRequest | + try: api_response = api_instance.create_workflow(namespace, body) + print("The response of WorkflowServiceApi->create_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->create_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowCreateRequest**](IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.md)| | + **namespace** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowCreateRequest**](IoArgoprojWorkflowV1alpha1WorkflowCreateRequest.md)| | ### Return type @@ -27383,7 +94,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -27394,7 +104,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **delete_workflow** -> bool, date, datetime, dict, float, int, list, str, none_type delete_workflow(namespace, name) +> object delete_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, force=force) @@ -27403,11 +113,10 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import 
time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -27420,7 +129,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -27428,53 +137,45 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - delete_options_grace_period_seconds = "deleteOptions.gracePeriodSeconds_example" # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) - delete_options_preconditions_uid = "deleteOptions.preconditions.uid_example" # str | Specifies the target UID. +optional. (optional) - delete_options_preconditions_resource_version = "deleteOptions.preconditions.resourceVersion_example" # str | Specifies the target ResourceVersion +optional. 
(optional) + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + delete_options_grace_period_seconds = 'delete_options_grace_period_seconds_example' # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) + delete_options_preconditions_uid = 'delete_options_preconditions_uid_example' # str | Specifies the target UID. +optional. (optional) + delete_options_preconditions_resource_version = 'delete_options_preconditions_resource_version_example' # str | Specifies the target ResourceVersion +optional. (optional) delete_options_orphan_dependents = True # bool | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. (optional) - delete_options_propagation_policy = "deleteOptions.propagationPolicy_example" # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) - delete_options_dry_run = [ - "deleteOptions.dryRun_example", - ] # [str] | When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. (optional) + delete_options_propagation_policy = 'delete_options_propagation_policy_example' # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) + delete_options_dry_run = ['delete_options_dry_run_example'] # List[str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
(optional) force = True # bool | (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_workflow(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowServiceApi->delete_workflow: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.delete_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, force=force) + print("The response of WorkflowServiceApi->delete_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->delete_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] - **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. 
| [optional] - **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] - **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] - **force** | **bool**| | [optional] + **namespace** | **str**| | + **name** | **str**| | + **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] + **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] + **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. 
| [optional] + **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] + **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] + **delete_options_dry_run** | [**List[str]**](str.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
| [optional] + **force** | **bool**| | [optional] ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -27485,7 +186,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -27496,7 +196,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_workflow** -> IoArgoprojWorkflowV1alpha1Workflow get_workflow(namespace, name) +> IoArgoprojWorkflowV1alpha1Workflow get_workflow(namespace, name, get_options_resource_version=get_options_resource_version, fields=fields) @@ -27505,12 +205,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -27523,7 +222,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -27531,37 +230,31 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - get_options_resource_version = "getOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - fields = "fields_example" # str | Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\". (optional) + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + get_options_resource_version = 'get_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + fields = 'fields_example' # str | Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\". 
(optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.get_workflow(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowServiceApi->get_workflow: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.get_workflow(namespace, name, get_options_resource_version=get_options_resource_version, fields=fields) + print("The response of WorkflowServiceApi->get_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->get_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **fields** | **str**| Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\". | [optional] + **namespace** | **str**| | + **name** | **str**| | + **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **fields** | **str**| Fields to be included or excluded in the response. e.g. \"spec,status.phase\", \"-status.nodes\". 
| [optional] ### Return type @@ -27576,7 +269,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -27596,13 +288,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_lint_request import IoArgoprojWorkflowV1alpha1WorkflowLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_lint_request import IoArgoprojWorkflowV1alpha1WorkflowLintRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -27615,7 +306,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -27623,27306 +314,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowLintRequest( - namespace="namespace_example", - workflow=IoArgoprojWorkflowV1alpha1Workflow( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - 
key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - 
topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - 
strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": 
"key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - 
from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, 
- ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - 
labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - 
type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - 
pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - 
namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - 
ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), 
- headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - 
success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - 
service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - 
name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - 
VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - 
client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - 
env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - 
grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), 
- stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - 
outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, 
- oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - 
], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - 
prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - 
with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - 
fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - 
kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - 
gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - 
pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - 
namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - 
ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), 
- headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - 
success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - 
service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - 
name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - 
VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - 
client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - 
env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - 
grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), 
- stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - 
outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, 
- oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - 
], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - 
prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - 
with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - 
fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - 
kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - 
gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - 
fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": "key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - 
aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - 
cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - 
flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - 
DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - 
fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - status=IoArgoprojWorkflowV1alpha1WorkflowStatus( - artifact_gc_status=IoArgoprojWorkflowV1alpha1ArtGCStatus( - not_specified=True, - pods_recouped={ - "key": True, - }, - strategies_processed={ - "key": True, - }, - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRefStatus( - artifact_repository=IoArgoprojWorkflowV1alpha1ArtifactRepository( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifactRepository( - key_format="key_format_example", - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo_url="repo_url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifactRepository( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob_name_format="blob_name_format_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifactRepository( - bucket="bucket_example", - key_format="key_format_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifactRepository( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - 
krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path_format="path_format_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifactRepository( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key_format="key_format_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - s3=IoArgoprojWorkflowV1alpha1S3ArtifactRepository( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key_format="key_format_example", - key_prefix="key_prefix_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - config_map="config_map_example", - default=True, - key="key_example", - namespace="namespace_example", - ), - compressed_nodes="compressed_nodes_example", - conditions=[ - IoArgoprojWorkflowV1alpha1Condition( - message="message_example", - status="status_example", - type="type_example", - ), - ], - estimated_duration=1, - finished_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - nodes={ - "key": IoArgoprojWorkflowV1alpha1NodeStatus( - boundary_id="boundary_id_example", - children=[ - "children_example", - ], - daemoned=True, - display_name="display_name_example", - estimated_duration=1, - finished_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - host_node_name="host_node_name_example", - id="id_example", - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - 
deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoization_status=IoArgoprojWorkflowV1alpha1MemoizationStatus( - cache_name="cache_name_example", - hit=True, - key="key_example", - ), - message="message_example", - name="name_example", - node_flag=IoArgoprojWorkflowV1alpha1NodeFlag( - hooked=True, - retried=True, - ), - outbound_nodes=[ - "outbound_nodes_example", - ], - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - 
labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - 
phase="phase_example", - pod_ip="pod_ip_example", - progress="progress_example", - resources_duration={ - "key": 1, - }, - started_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - synchronization_status=IoArgoprojWorkflowV1alpha1NodeSynchronizationStatus( - waiting="waiting_example", - ), - template_name="template_name_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - template_scope="template_scope_example", - type="type_example", - ), - }, - offload_node_status_version="offload_node_status_version_example", - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", 
- optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - persistent_volume_claims=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - 
default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - 
self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - 
initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - 
keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - phase="phase_example", - progress="progress_example", - resources_duration={ - "key": 1, - }, - started_at=dateutil_parser('1970-01-01T00:00:00.00Z'), - stored_templates={ - "key": IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], 
- required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - 
data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - }, - stored_workflow_template_spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - 
weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - 
match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - 
basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": 
"key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - 
Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( 
- match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - 
container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - 
), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - 
name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", 
- service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - 
server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( 
- auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - 
s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - 
template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - 
insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - 
IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - 
image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - 
port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - 
value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, 
- ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - 
jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - 
branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - 
key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - 
HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - 
resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - 
sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - 
post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - 
success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - 
mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - 
value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - 
suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - 
downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - 
resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - 
persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - 
label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - 
url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - 
timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - 
value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - 
lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - 
success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - 
mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - 
template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - 
name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - 
http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - 
termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - 
branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - 
key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - 
value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - 
gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( 
- key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - 
kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - 
], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - 
device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - 
prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - 
with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - 
fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - 
kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - 
target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - 
gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - 
fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": "key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - 
aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - 
cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - 
flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - 
DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - 
fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - synchronization=IoArgoprojWorkflowV1alpha1SynchronizationStatus( - mutex=IoArgoprojWorkflowV1alpha1MutexStatus( - holding=[ - IoArgoprojWorkflowV1alpha1MutexHolding( - holder="holder_example", - mutex="mutex_example", - ), - ], - waiting=[ - IoArgoprojWorkflowV1alpha1MutexHolding( - holder="holder_example", - mutex="mutex_example", - ), - ], - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreStatus( - holding=[ - IoArgoprojWorkflowV1alpha1SemaphoreHolding( - holders=[ - "holders_example", - ], - semaphore="semaphore_example", - ), - ], - waiting=[ - IoArgoprojWorkflowV1alpha1SemaphoreHolding( - holders=[ - "holders_example", - ], - semaphore="semaphore_example", - ), - ], - ), - ), - task_results_completion_status={ - "key": True, - }, - ), - ), - ) # IoArgoprojWorkflowV1alpha1WorkflowLintRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowLintRequest() # IoArgoprojWorkflowV1alpha1WorkflowLintRequest | + try: api_response = api_instance.lint_workflow(namespace, body) + print("The response of WorkflowServiceApi->lint_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->lint_workflow: %s\n" % e) ``` + ### 
Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowLintRequest**](IoArgoprojWorkflowV1alpha1WorkflowLintRequest.md)| | + **namespace** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowLintRequest**](IoArgoprojWorkflowV1alpha1WorkflowLintRequest.md)| | ### Return type @@ -54937,7 +349,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -54948,7 +359,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_workflows** -> IoArgoprojWorkflowV1alpha1WorkflowList list_workflows(namespace) +> IoArgoprojWorkflowV1alpha1WorkflowList list_workflows(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, fields=fields) @@ -54957,12 +368,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_list import IoArgoprojWorkflowV1alpha1WorkflowList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_list 
import IoArgoprojWorkflowV1alpha1WorkflowList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -54975,7 +385,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -54983,51 +393,45 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
(optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - fields = "fields_example" # str | Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\". (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.list_workflows(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowServiceApi->list_workflows: %s\n" % e) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. 
(optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) + fields = 'fields_example' # str | Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\". 
(optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_workflows(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, fields=fields) + print("The response of WorkflowServiceApi->list_workflows:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->list_workflows: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. 
If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] - **fields** | **str**| Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\". | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
| [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **fields** | **str**| Fields to be included or excluded in the response. e.g. \"items.spec,items.status.phase\", \"-items.status.nodes\". | [optional] ### Return type @@ -55042,7 +446,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -55053,7 +456,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **pod_logs** -> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry pod_logs(namespace, name, pod_name) +> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry pod_logs(namespace, name, pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, grep=grep, selector=selector) DEPRECATED: Cannot work via HTTP if podName is an empty 
string. Use WorkflowLogs. @@ -55062,12 +465,11 @@ DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55080,7 +482,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55088,61 +490,54 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - pod_name = "podName_example" # str | - log_options_container = "logOptions.container_example" # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. 
(optional) + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + pod_name = 'pod_name_example' # str | + log_options_container = 'log_options_container_example' # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. (optional) log_options_follow = True # bool | Follow the log stream of the pod. Defaults to false. +optional. (optional) log_options_previous = True # bool | Return previous terminated container logs. Defaults to false. +optional. (optional) - log_options_since_seconds = "logOptions.sinceSeconds_example" # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. (optional) - log_options_since_time_seconds = "logOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) - log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) + log_options_since_seconds = 'log_options_since_seconds_example' # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. 
(optional) + log_options_since_time_seconds = 'log_options_since_time_seconds_example' # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) + log_options_since_time_nanos = 56 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - log_options_tail_lines = "logOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) - log_options_limit_bytes = "logOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) + log_options_tail_lines = 'log_options_tail_lines_example' # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + log_options_limit_bytes = 'log_options_limit_bytes_example' # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
(optional) log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) - grep = "grep_example" # str | (optional) - selector = "selector_example" # str | (optional) + grep = 'grep_example' # str | (optional) + selector = 'selector_example' # str | (optional) - # example passing only required values which don't have defaults set - try: - # DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. - api_response = api_instance.pod_logs(namespace, name, pod_name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowServiceApi->pod_logs: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: # DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. 
api_response = api_instance.pod_logs(namespace, name, pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, grep=grep, selector=selector) + print("The response of WorkflowServiceApi->pod_logs:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->pod_logs: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **pod_name** | **str**| | - **log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] - **log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] - **log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] - **log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] - **log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. 
| [optional] - **log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] - **log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] - **log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] - **log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] - **grep** | **str**| | [optional] - **selector** | **str**| | [optional] + **namespace** | **str**| | + **name** | **str**| | + **pod_name** | **str**| | + **log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. 
+optional. | [optional] + **log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] + **log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] + **log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] + **log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] + **log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] + **log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] + **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
| [optional] + **log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **grep** | **str**| | [optional] + **selector** | **str**| | [optional] ### Return type @@ -55157,7 +552,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -55177,13 +571,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resubmit_request import IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
configuration = argo_workflows.Configuration( @@ -55196,7 +589,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55204,34 +597,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest( - memoized=True, - name="name_example", - namespace="namespace_example", - parameters=[ - "parameters_example", - ], - ) # IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest() # IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest | + try: api_response = api_instance.resubmit_workflow(namespace, name, body) + print("The response of WorkflowServiceApi->resubmit_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->resubmit_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest**](IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md)| | + 
**namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest**](IoArgoprojWorkflowV1alpha1WorkflowResubmitRequest.md)| | ### Return type @@ -55246,7 +634,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -55266,13 +653,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_resume_request import IoArgoprojWorkflowV1alpha1WorkflowResumeRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55285,7 +671,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55293,31 +679,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowResumeRequest( - name="name_example", - namespace="namespace_example", - node_field_selector="node_field_selector_example", - ) # IoArgoprojWorkflowV1alpha1WorkflowResumeRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowResumeRequest() # IoArgoprojWorkflowV1alpha1WorkflowResumeRequest | + try: api_response = api_instance.resume_workflow(namespace, name, body) + print("The response of WorkflowServiceApi->resume_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->resume_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowResumeRequest**](IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowResumeRequest**](IoArgoprojWorkflowV1alpha1WorkflowResumeRequest.md)| | ### Return type @@ -55332,7 +716,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | 
Response headers | @@ -55352,13 +735,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_retry_request import IoArgoprojWorkflowV1alpha1WorkflowRetryRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55371,7 +753,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55379,35 +761,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowRetryRequest( - name="name_example", - namespace="namespace_example", - node_field_selector="node_field_selector_example", - parameters=[ - "parameters_example", - ], - restart_successful=True, - ) # IoArgoprojWorkflowV1alpha1WorkflowRetryRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowRetryRequest() # IoArgoprojWorkflowV1alpha1WorkflowRetryRequest | + try: api_response = api_instance.retry_workflow(namespace, name, body) + print("The response of WorkflowServiceApi->retry_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->retry_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowRetryRequest**](IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowRetryRequest**](IoArgoprojWorkflowV1alpha1WorkflowRetryRequest.md)| | ### Return type @@ -55422,7 +798,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - 
### HTTP response details | Status code | Description | Response headers | @@ -55442,13 +817,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_set_request import IoArgoprojWorkflowV1alpha1WorkflowSetRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55461,7 +835,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55469,34 +843,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowSetRequest( - message="message_example", - name="name_example", - namespace="namespace_example", - node_field_selector="node_field_selector_example", - output_parameters="output_parameters_example", - phase="phase_example", - ) # IoArgoprojWorkflowV1alpha1WorkflowSetRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowSetRequest() # IoArgoprojWorkflowV1alpha1WorkflowSetRequest | + try: api_response = api_instance.set_workflow(namespace, name, body) + print("The response of WorkflowServiceApi->set_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->set_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowSetRequest**](IoArgoprojWorkflowV1alpha1WorkflowSetRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowSetRequest**](IoArgoprojWorkflowV1alpha1WorkflowSetRequest.md)| | ### Return type @@ -55511,7 +880,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: 
application/json - ### HTTP response details | Status code | Description | Response headers | @@ -55531,13 +899,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_stop_request import IoArgoprojWorkflowV1alpha1WorkflowStopRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_stop_request import IoArgoprojWorkflowV1alpha1WorkflowStopRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55550,7 +917,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55558,32 +925,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowStopRequest( - message="message_example", - name="name_example", - namespace="namespace_example", - node_field_selector="node_field_selector_example", - ) # IoArgoprojWorkflowV1alpha1WorkflowStopRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowStopRequest() # IoArgoprojWorkflowV1alpha1WorkflowStopRequest | + try: api_response = api_instance.stop_workflow(namespace, name, body) + print("The response of WorkflowServiceApi->stop_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->stop_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowStopRequest**](IoArgoprojWorkflowV1alpha1WorkflowStopRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowStopRequest**](IoArgoprojWorkflowV1alpha1WorkflowStopRequest.md)| | ### Return type @@ -55598,7 +962,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | 
Response headers | @@ -55618,13 +981,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_submit_request import IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55637,7 +999,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55645,52 +1007,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest( - namespace="namespace_example", - resource_kind="resource_kind_example", - resource_name="resource_name_example", - submit_options=IoArgoprojWorkflowV1alpha1SubmitOpts( - annotations="annotations_example", - dry_run=True, - entry_point="entry_point_example", - generate_name="generate_name_example", - labels="labels_example", - name="name_example", - owner_reference=OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - parameters=[ - "parameters_example", - ], - pod_priority_class_name="pod_priority_class_name_example", - priority=1, - server_dry_run=True, - service_account="service_account_example", - ), - ) # IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest() # IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest | + try: api_response = api_instance.submit_workflow(namespace, body) + print("The response of WorkflowServiceApi->submit_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->submit_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | 
------------- - **namespace** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest**](IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.md)| | + **namespace** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest**](IoArgoprojWorkflowV1alpha1WorkflowSubmitRequest.md)| | ### Return type @@ -55705,7 +1042,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -55725,13 +1061,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_suspend_request import IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55744,7 +1079,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55752,30 +1087,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest( - name="name_example", - namespace="namespace_example", - ) # IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest() # IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest | + try: api_response = api_instance.suspend_workflow(namespace, name, body) + print("The response of WorkflowServiceApi->suspend_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->suspend_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest**](IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest**](IoArgoprojWorkflowV1alpha1WorkflowSuspendRequest.md)| | ### Return type @@ -55790,7 +1124,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -55810,13 +1143,12 @@ 
Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow import IoArgoprojWorkflowV1alpha1Workflow +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_terminate_request import IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55829,7 +1161,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55837,30 +1169,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest( - name="name_example", - namespace="namespace_example", - ) # IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest | - - # example passing only required values which don't have defaults set + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest() # IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest | + try: api_response = api_instance.terminate_workflow(namespace, name, body) + print("The response of WorkflowServiceApi->terminate_workflow:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->terminate_workflow: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest**](IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.md)| | + **namespace** | **str**| | + **name** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest**](IoArgoprojWorkflowV1alpha1WorkflowTerminateRequest.md)| | ### Return type @@ -55875,7 +1206,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ 
-55886,7 +1216,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **watch_events** -> StreamResultOfEvent watch_events(namespace) +> StreamResultOfEvent watch_events(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -55895,12 +1225,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_event import StreamResultOfEvent +from argo_workflows.models.stream_result_of_event import StreamResultOfEvent +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -55913,7 +1242,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -55921,49 +1250,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. 
(optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.watch_events(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowServiceApi->watch_events: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.watch_events(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of WorkflowServiceApi->watch_events:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->watch_events: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". 
Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. 
If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. 
If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] ### Return type @@ -55978,7 +1301,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -55989,7 +1311,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **watch_workflows** -> StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent watch_workflows(namespace) +> StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent watch_workflows(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, fields=fields) @@ -55998,12 +1320,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from 
argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_workflow_watch_event import StreamResultOfIoArgoprojWorkflowV1alpha1WorkflowWatchEvent +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -56016,7 +1337,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -56024,51 +1345,45 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
(optional) + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. 
This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - fields = "fields_example" # str | (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.watch_workflows(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowServiceApi->watch_workflows: %s\n" % e) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
(optional) + fields = 'fields_example' # str | (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.watch_workflows(namespace, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue, fields=fields) + print("The response of WorkflowServiceApi->watch_workflows:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->watch_workflows: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] - **fields** | **str**| | [optional] + **namespace** | **str**| | + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. 
If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. 
If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **fields** | **str**| | [optional] ### Return type @@ -56083,7 +1398,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -56094,7 +1408,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **workflow_logs** -> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry workflow_logs(namespace, name) +> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry workflow_logs(namespace, name, pod_name=pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, grep=grep, selector=selector) @@ -56103,12 +1417,11 @@ Name | Type | Description | Notes * 
Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.models.stream_result_of_io_argoproj_workflow_v1alpha1_log_entry import StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -56121,7 +1434,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -56129,59 +1442,53 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_service_api.WorkflowServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - pod_name = "podName_example" # str | (optional) - log_options_container = "logOptions.container_example" # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. 
(optional) + api_instance = argo_workflows.WorkflowServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + pod_name = 'pod_name_example' # str | (optional) + log_options_container = 'log_options_container_example' # str | The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. (optional) log_options_follow = True # bool | Follow the log stream of the pod. Defaults to false. +optional. (optional) log_options_previous = True # bool | Return previous terminated container logs. Defaults to false. +optional. (optional) - log_options_since_seconds = "logOptions.sinceSeconds_example" # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. (optional) - log_options_since_time_seconds = "logOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) - log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) + log_options_since_seconds = 'log_options_since_seconds_example' # str | A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. 
(optional) + log_options_since_time_seconds = 'log_options_since_time_seconds_example' # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) + log_options_since_time_nanos = 56 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - log_options_tail_lines = "logOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) - log_options_limit_bytes = "logOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) + log_options_tail_lines = 'log_options_tail_lines_example' # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + log_options_limit_bytes = 'log_options_limit_bytes_example' # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
(optional) log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) - grep = "grep_example" # str | (optional) - selector = "selector_example" # str | (optional) + grep = 'grep_example' # str | (optional) + selector = 'selector_example' # str | (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.workflow_logs(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowServiceApi->workflow_logs: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.workflow_logs(namespace, name, pod_name=pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, grep=grep, selector=selector) + print("The response of WorkflowServiceApi->workflow_logs:\n") pprint(api_response) - except 
argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowServiceApi->workflow_logs: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **pod_name** | **str**| | [optional] - **log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] - **log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] - **log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] - **log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. | [optional] - **log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] - **log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] - **log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. 
If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] - **log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] - **log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] - **grep** | **str**| | [optional] - **selector** | **str**| | [optional] + **namespace** | **str**| | + **name** | **str**| | + **pod_name** | **str**| | [optional] + **log_options_container** | **str**| The container for which to stream logs. Defaults to only container if there is one container in the pod. +optional. | [optional] + **log_options_follow** | **bool**| Follow the log stream of the pod. Defaults to false. +optional. | [optional] + **log_options_previous** | **bool**| Return previous terminated container logs. Defaults to false. +optional. | [optional] + **log_options_since_seconds** | **str**| A relative time in seconds before the current time from which to show logs. If this value precedes the time a pod was started, only logs since the pod start will be returned. If this value is in the future, no logs will be returned. Only one of sinceSeconds or sinceTime may be specified. +optional. 
| [optional] + **log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] + **log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] + **log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] + **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] + **log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. 
| [optional] + **grep** | **str**| | [optional] + **selector** | **str**| | [optional] ### Return type @@ -56196,7 +1503,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/docs/WorkflowTemplateServiceApi.md b/sdks/python/client/docs/WorkflowTemplateServiceApi.md index 8f9032d97d57..bbafbac126d1 100644 --- a/sdks/python/client/docs/WorkflowTemplateServiceApi.md +++ b/sdks/python/client/docs/WorkflowTemplateServiceApi.md @@ -22,13 +22,12 @@ Method | HTTP request | Description * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_create_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -41,7 +40,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -49,10697 +48,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) - namespace = "namespace_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest( - create_options=CreateOptions( - dry_run=[ - "dry_run_example", - ], - field_manager="field_manager_example", - field_validation="field_validation_example", - ), - namespace="namespace_example", - template=IoArgoprojWorkflowV1alpha1WorkflowTemplate( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - 
node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - 
key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - 
value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - 
scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - 
match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - 
], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, 
- ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( 
- host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - 
port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - 
zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - 
json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - 
name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - 
histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - 
), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - 
sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - 
), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - 
divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - 
host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - 
template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - 
fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - 
block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - 
type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - 
user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - 
data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": 
"key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": 
"key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - 
portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - 
key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - ) # IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest | + api_instance = argo_workflows.WorkflowTemplateServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest() # IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.create_workflow_template(namespace, body) + print("The response of WorkflowTemplateServiceApi->create_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowTemplateServiceApi->create_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest**](IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.md)| 
| + **namespace** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest**](IoArgoprojWorkflowV1alpha1WorkflowTemplateCreateRequest.md)| | ### Return type @@ -10754,7 +83,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10765,7 +93,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **delete_workflow_template** -> bool, date, datetime, dict, float, int, list, str, none_type delete_workflow_template(namespace, name) +> object delete_workflow_template(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) @@ -10774,11 +102,10 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -10791,7 +118,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. 
# Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10799,51 +126,43 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - delete_options_grace_period_seconds = "deleteOptions.gracePeriodSeconds_example" # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. (optional) - delete_options_preconditions_uid = "deleteOptions.preconditions.uid_example" # str | Specifies the target UID. +optional. (optional) - delete_options_preconditions_resource_version = "deleteOptions.preconditions.resourceVersion_example" # str | Specifies the target ResourceVersion +optional. (optional) + api_instance = argo_workflows.WorkflowTemplateServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + delete_options_grace_period_seconds = 'delete_options_grace_period_seconds_example' # str | The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. 
(optional) + delete_options_preconditions_uid = 'delete_options_preconditions_uid_example' # str | Specifies the target UID. +optional. (optional) + delete_options_preconditions_resource_version = 'delete_options_preconditions_resource_version_example' # str | Specifies the target ResourceVersion +optional. (optional) delete_options_orphan_dependents = True # bool | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. (optional) - delete_options_propagation_policy = "deleteOptions.propagationPolicy_example" # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) - delete_options_dry_run = [ - "deleteOptions.dryRun_example", - ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
(optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.delete_workflow_template(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowTemplateServiceApi->delete_workflow_template: %s\n" % e) + delete_options_propagation_policy = 'delete_options_propagation_policy_example' # str | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. (optional) + delete_options_dry_run = ['delete_options_dry_run_example'] # List[str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. 
(optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.delete_workflow_template(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + print("The response of WorkflowTemplateServiceApi->delete_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowTemplateServiceApi->delete_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] - **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] - **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] - **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. 
| [optional] - **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] - **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] + **namespace** | **str**| | + **name** | **str**| | + **delete_options_grace_period_seconds** | **str**| The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. +optional. | [optional] + **delete_options_preconditions_uid** | **str**| Specifies the target UID. +optional. | [optional] + **delete_options_preconditions_resource_version** | **str**| Specifies the target ResourceVersion +optional. | [optional] + **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. 
| [optional] + **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] + **delete_options_dry_run** | [**List[str]**](str.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional. | [optional] ### Return type -**bool, date, datetime, dict, float, int, list, str, none_type** +**object** ### Authorization @@ -10854,7 +173,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10865,7 +183,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_workflow_template** -> IoArgoprojWorkflowV1alpha1WorkflowTemplate get_workflow_template(namespace, name) +> IoArgoprojWorkflowV1alpha1WorkflowTemplate get_workflow_template(namespace, name, get_options_resource_version=get_options_resource_version) @@ -10874,12 +192,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -10892,7 +209,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10900,35 +217,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | - get_options_resource_version = "getOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + api_instance = argo_workflows.WorkflowTemplateServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | + get_options_resource_version = 'get_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. 
See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - # example passing only required values which don't have defaults set - try: - api_response = api_instance.get_workflow_template(namespace, name) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowTemplateServiceApi->get_workflow_template: %s\n" % e) - - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.get_workflow_template(namespace, name, get_options_resource_version=get_options_resource_version) + print("The response of WorkflowTemplateServiceApi->get_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowTemplateServiceApi->get_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| | - **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **namespace** | **str**| | + **name** | **str**| | + **get_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional | [optional] ### Return type @@ -10943,7 +254,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -10963,13 +273,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_lint_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -10982,7 +291,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -10990,10697 +299,27 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) - namespace = "namespace_example" # str | - body = IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest( - create_options=CreateOptions( - dry_run=[ - "dry_run_example", - ], - field_manager="field_manager_example", - field_validation="field_validation_example", - ), - namespace="namespace_example", - template=IoArgoprojWorkflowV1alpha1WorkflowTemplate( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - 
node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - 
key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - 
value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - 
scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - 
match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - 
], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, 
- ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( 
- host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - 
port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - 
zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - 
json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - 
name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - 
histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - 
), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - 
sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - 
), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - 
divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - 
host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - 
template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - 
fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - 
block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - 
type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - 
user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - 
data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": 
"key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": 
"key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - 
portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - 
key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - ) # IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest | + api_instance = argo_workflows.WorkflowTemplateServiceApi(api_client) + namespace = 'namespace_example' # str | + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest() # IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.lint_workflow_template(namespace, body) + print("The response of WorkflowTemplateServiceApi->lint_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowTemplateServiceApi->lint_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest**](IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.md)| | + 
**namespace** | **str**| | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest**](IoArgoprojWorkflowV1alpha1WorkflowTemplateLintRequest.md)| | ### Return type @@ -21695,7 +334,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -21706,7 +344,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_workflow_templates** -> IoArgoprojWorkflowV1alpha1WorkflowTemplateList list_workflow_templates(namespace) +> IoArgoprojWorkflowV1alpha1WorkflowTemplateList list_workflow_templates(namespace, name_pattern=name_pattern, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) @@ -21715,12 +353,11 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_list import IoArgoprojWorkflowV1alpha1WorkflowTemplateList +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_list import IoArgoprojWorkflowV1alpha1WorkflowTemplateList +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is 
optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -21733,7 +370,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -21741,51 +378,45 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) - namespace = "namespace_example" # str | - name_pattern = "namePattern_example" # str | (optional) - list_options_label_selector = "listOptions.labelSelector_example" # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) - list_options_field_selector = "listOptions.fieldSelector_example" # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. (optional) + api_instance = argo_workflows.WorkflowTemplateServiceApi(api_client) + namespace = 'namespace_example' # str | + name_pattern = 'name_pattern_example' # str | (optional) + list_options_label_selector = 'list_options_label_selector_example' # str | A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. (optional) + list_options_field_selector = 'list_options_field_selector_example' # str | A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. 
(optional) list_options_watch = True # bool | Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. (optional) list_options_allow_watch_bookmarks = True # bool | allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. (optional) - list_options_resource_version = "listOptions.resourceVersion_example" # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_resource_version_match = "listOptions.resourceVersionMatch_example" # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) - list_options_timeout_seconds = "listOptions.timeoutSeconds_example" # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) - list_options_limit = "listOptions.limit_example" # str | limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) - list_options_continue = "listOptions.continue_example" # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - - # example passing only required values which don't have defaults set - try: - api_response = api_instance.list_workflow_templates(namespace) - pprint(api_response) - except argo_workflows.ApiException as e: - print("Exception when calling WorkflowTemplateServiceApi->list_workflow_templates: %s\n" % e) + list_options_resource_version = 'list_options_resource_version_example' # str | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_resource_version_match = 'list_options_resource_version_match_example' # str | resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional (optional) + list_options_timeout_seconds = 'list_options_timeout_seconds_example' # str | Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. (optional) + list_options_limit = 'list_options_limit_example' # str | limit is a maximum number of responses to return for a list call. 
If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. (optional) + list_options_continue = 'list_options_continue_example' # str | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. 
If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. (optional) - # example passing only required values which don't have defaults set - # and optional values try: api_response = api_instance.list_workflow_templates(namespace, name_pattern=name_pattern, list_options_label_selector=list_options_label_selector, list_options_field_selector=list_options_field_selector, list_options_watch=list_options_watch, list_options_allow_watch_bookmarks=list_options_allow_watch_bookmarks, list_options_resource_version=list_options_resource_version, list_options_resource_version_match=list_options_resource_version_match, list_options_timeout_seconds=list_options_timeout_seconds, list_options_limit=list_options_limit, list_options_continue=list_options_continue) + print("The response of WorkflowTemplateServiceApi->list_workflow_templates:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowTemplateServiceApi->list_workflow_templates: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name_pattern** | **str**| | [optional] - **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. 
| [optional] - **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] - **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] - **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] - **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] - **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] - **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. 
Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. | [optional] - **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. | [optional] + **namespace** | **str**| | + **name_pattern** | **str**| | [optional] + **list_options_label_selector** | **str**| A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional. | [optional] + **list_options_field_selector** | **str**| A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional. | [optional] + **list_options_watch** | **bool**| Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional. | [optional] + **list_options_allow_watch_bookmarks** | **bool**| allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. +optional. | [optional] + **list_options_resource_version** | **str**| resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional | [optional] + **list_options_resource_version_match** | **str**| resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset +optional | [optional] + **list_options_timeout_seconds** | **str**| Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional. | [optional] + **list_options_limit** | **str**| limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. 
| [optional] + **list_options_continue** | **str**| The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
| [optional] ### Return type @@ -21800,7 +431,6 @@ Name | Type | Description | Notes - **Content-Type**: Not defined - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | @@ -21820,13 +450,12 @@ Name | Type | Description | Notes * Api Key Authentication (BearerToken): ```python -import time import argo_workflows -from argo_workflows.api import workflow_template_service_api -from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate -from argo_workflows.model.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template import IoArgoprojWorkflowV1alpha1WorkflowTemplate +from argo_workflows.models.io_argoproj_workflow_v1alpha1_workflow_template_update_request import IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest +from argo_workflows.rest import ApiException from pprint import pprint + # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. configuration = argo_workflows.Configuration( @@ -21839,7 +468,7 @@ configuration = argo_workflows.Configuration( # satisfies your auth use case. # Configure API key authorization: BearerToken -configuration.api_key['BearerToken'] = 'YOUR_API_KEY' +configuration.api_key['BearerToken'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed # configuration.api_key_prefix['BearerToken'] = 'Bearer' @@ -21847,10693 +476,29 @@ configuration.api_key['BearerToken'] = 'YOUR_API_KEY' # Enter a context with an instance of the API client with argo_workflows.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = workflow_template_service_api.WorkflowTemplateServiceApi(api_client) - namespace = "namespace_example" # str | - name = "name_example" # str | DEPRECATED: This field is ignored. - body = IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest( - name="name_example", - namespace="namespace_example", - template=IoArgoprojWorkflowV1alpha1WorkflowTemplate( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=IoArgoprojWorkflowV1alpha1WorkflowSpec( - active_deadline_seconds=1, - affinity=Affinity( - node_affinity=NodeAffinity( - 
preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_logs=True, - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - 
pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - 
lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - 
parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - artifact_gc=IoArgoprojWorkflowV1alpha1WorkflowLevelArtifactGC( - force_finalizer_removal=True, - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_spec_patch="pod_spec_patch_example", - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifact_repository_ref=IoArgoprojWorkflowV1alpha1ArtifactRepositoryRef( - config_map="config_map_example", - key="key_example", - ), - automount_service_account_token=True, - dns_config=PodDNSConfig( - nameservers=[ - "nameservers_example", - ], - options=[ - PodDNSConfigOption( - name="name_example", - value="value_example", - ), - ], - searches=[ - "searches_example", - ], - ), - dns_policy="dns_policy_example", - entrypoint="entrypoint_example", - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - host_network=True, - image_pull_secrets=[ - LocalObjectReference( - name="name_example", - ), - ], - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - 
value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - node_selector={ - "key": "key_example", - }, - on_exit="on_exit_example", - parallelism=1, - pod_disruption_budget=IoK8sApiPolicyV1PodDisruptionBudgetSpec( - max_unavailable="max_unavailable_example", - min_available="min_available_example", - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - ), - pod_gc=IoArgoprojWorkflowV1alpha1PodGC( - delete_delay_duration=Duration( - duration="duration_example", - ), - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - strategy="strategy_example", - ), - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - pod_priority=1, - pod_priority_class_name="pod_priority_class_name_example", - pod_spec_patch="pod_spec_patch_example", - priority=1, - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - 
scheduler_name="scheduler_name_example", - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - shutdown="shutdown_example", - suspend=True, - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - template_defaults=IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - 
match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - 
], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, 
- ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( 
- host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - 
gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - 
secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - 
_exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - 
port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - 
password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - 
zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - 
endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - 
json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - 
use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - 
client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - 
), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - 
name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - 
path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - 
histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - 
), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - 
sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - 
), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - 
divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - 
host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - 
_from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - 
encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - 
artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - 
krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - 
template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - 
fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - 
block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - 
type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - 
user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - templates=[ - IoArgoprojWorkflowV1alpha1Template( - active_deadline_seconds="active_deadline_seconds_example", - affinity=Affinity( - node_affinity=NodeAffinity( - preferred_during_scheduling_ignored_during_execution=[ - PreferredSchedulingTerm( - preference=NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - weight=1, - ), - ], - 
required_during_scheduling_ignored_during_execution=NodeSelector( - node_selector_terms=[ - NodeSelectorTerm( - match_expressions=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - match_fields=[ - NodeSelectorRequirement( - key="key_example", - operator="DoesNotExist", - values=[ - "values_example", - ], - ), - ], - ), - ], - ), - ), - pod_affinity=PodAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - pod_anti_affinity=PodAntiAffinity( - preferred_during_scheduling_ignored_during_execution=[ - WeightedPodAffinityTerm( - pod_affinity_term=PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - 
LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - weight=1, - ), - ], - required_during_scheduling_ignored_during_execution=[ - PodAffinityTerm( - label_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespace_selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - namespaces=[ - "namespaces_example", - ], - topology_key="topology_key_example", - ), - ], - ), - ), - archive_location=IoArgoprojWorkflowV1alpha1ArtifactLocation( - archive_logs=True, - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), 
- client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - 
secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - ), - automount_service_account_token=True, - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="Always", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - 
"command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - 
windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="FallbackToLogsOnError", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - container_set=IoArgoprojWorkflowV1alpha1ContainerSetTemplate( - containers=[ - IoArgoprojWorkflowV1alpha1ContainerNode( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - dependencies=[ - "dependencies_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - 
initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - retry_strategy=IoArgoprojWorkflowV1alpha1ContainerSetRetryStrategy( - duration="duration_example", - retries="retries_example", - ), - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - ), - daemon=True, - dag=IoArgoprojWorkflowV1alpha1DAGTemplate( - fail_fast=True, - target="target_example", - tasks=[ - IoArgoprojWorkflowV1alpha1DAGTask( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - 
http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - 
data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - dependencies=[ - "dependencies_example", - ], - depends="depends_example", - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - 
IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - 
hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - 
oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ], - ), - data=IoArgoprojWorkflowV1alpha1Data( - source=IoArgoprojWorkflowV1alpha1DataSource( - artifact_paths=IoArgoprojWorkflowV1alpha1ArtifactPaths( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - 
azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", 
- name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - transformation=[ - IoArgoprojWorkflowV1alpha1TransformationStep( - expression="expression_example", - ), - ], - ), - executor=IoArgoprojWorkflowV1alpha1ExecutorConfig( - service_account_name="service_account_name_example", - ), - fail_fast=True, - host_aliases=[ - HostAlias( - hostnames=[ - "hostnames_example", - ], - ip="ip_example", - ), - ], - http=IoArgoprojWorkflowV1alpha1HTTP( - body="body_example", - body_from=IoArgoprojWorkflowV1alpha1HTTPBodySource( - bytes='YQ==', - ), - headers=[ - IoArgoprojWorkflowV1alpha1HTTPHeader( - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1HTTPHeaderSource( - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - insecure_skip_verify=True, - method="method_example", - success_condition="success_condition_example", - timeout_seconds=1, - url="url_example", - ), - init_containers=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - 
config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - 
period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - 
failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - inputs=IoArgoprojWorkflowV1alpha1Inputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - 
account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - 
key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - memoize=IoArgoprojWorkflowV1alpha1Memoize( - cache=IoArgoprojWorkflowV1alpha1Cache( - config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - key="key_example", - max_age="max_age_example", - ), - metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - metrics=IoArgoprojWorkflowV1alpha1Metrics( - prometheus=[ - IoArgoprojWorkflowV1alpha1Prometheus( - counter=IoArgoprojWorkflowV1alpha1Counter( - value="value_example", - ), - 
gauge=IoArgoprojWorkflowV1alpha1Gauge( - operation="operation_example", - realtime=True, - value="value_example", - ), - help="help_example", - histogram=IoArgoprojWorkflowV1alpha1Histogram( - buckets=[ - 3.14, - ], - value="value_example", - ), - labels=[ - IoArgoprojWorkflowV1alpha1MetricLabel( - key="key_example", - value="value_example", - ), - ], - name="name_example", - when="when_example", - ), - ], - ), - name="name_example", - node_selector={ - "key": "key_example", - }, - outputs=IoArgoprojWorkflowV1alpha1Outputs( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - 
disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - 
role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - exit_code="exit_code_example", - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - result="result_example", - ), - parallelism=1, - plugin={}, - pod_spec_patch="pod_spec_patch_example", - priority=1, - priority_class_name="priority_class_name_example", - resource=IoArgoprojWorkflowV1alpha1ResourceTemplate( - action="action_example", - failure_condition="failure_condition_example", - flags=[ - "flags_example", - ], - manifest="manifest_example", - manifest_from=IoArgoprojWorkflowV1alpha1ManifestFrom( - artifact=IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - 
username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( 
- basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - 
access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ), - merge_strategy="merge_strategy_example", - set_owner_reference=True, - success_condition="success_condition_example", - ), - retry_strategy=IoArgoprojWorkflowV1alpha1RetryStrategy( - affinity=IoArgoprojWorkflowV1alpha1RetryAffinity( - node_anti_affinity={}, - ), - backoff=IoArgoprojWorkflowV1alpha1Backoff( - duration="duration_example", - factor="factor_example", - max_duration="max_duration_example", - ), - expression="expression_example", - limit="limit_example", - retry_policy="retry_policy_example", - ), - scheduler_name="scheduler_name_example", - script=IoArgoprojWorkflowV1alpha1ScriptTemplate( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - 
container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - 
host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - source="source_example", - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - 
scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - security_context=PodSecurityContext( - fs_group=1, - fs_group_change_policy="fs_group_change_policy_example", - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - supplemental_groups=[ - 1, - ], - sysctls=[ - Sysctl( - name="name_example", - value="value_example", - ), - ], - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - service_account_name="service_account_name_example", - sidecars=[ - IoArgoprojWorkflowV1alpha1UserContainer( - args=[ - "args_example", - ], - command=[ - "command_example", - ], - env=[ - EnvVar( - name="name_example", - value="value_example", - value_from=EnvVarSource( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - field_ref=ObjectFieldSelector( - 
api_version="api_version_example", - field_path="field_path_example", - ), - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - secret_key_ref=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - ], - env_from=[ - EnvFromSource( - config_map_ref=ConfigMapEnvSource( - name="name_example", - optional=True, - ), - prefix="prefix_example", - secret_ref=SecretEnvSource( - name="name_example", - optional=True, - ), - ), - ], - image="image_example", - image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - 
termination_grace_period_seconds=1, - timeout_seconds=1, - ), - mirror_volume_mounts=True, - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="SCTP", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - security_context=SecurityContext( - allow_privilege_escalation=True, - capabilities=Capabilities( - add=[ - "add_example", - ], - drop=[ - "drop_example", - ], - ), - privileged=True, - proc_mount="proc_mount_example", - read_only_root_filesystem=True, - run_as_group=1, - run_as_non_root=True, - run_as_user=1, - se_linux_options=SELinuxOptions( - level="level_example", - role="role_example", - type="type_example", - user="user_example", - ), - seccomp_profile=SeccompProfile( - localhost_profile="localhost_profile_example", - type="Localhost", - ), - windows_options=WindowsSecurityContextOptions( - gmsa_credential_spec="gmsa_credential_spec_example", - gmsa_credential_spec_name="gmsa_credential_spec_name_example", - host_process=True, - run_as_user_name="run_as_user_name_example", - ), - ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - 
http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="HTTP", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], - volume_mounts=[ - VolumeMount( - mount_path="mount_path_example", - mount_propagation="mount_propagation_example", - name="name_example", - read_only=True, - sub_path="sub_path_example", - sub_path_expr="sub_path_expr_example", - ), - ], - working_dir="working_dir_example", - ), - ], - steps=[ - IoArgoprojWorkflowV1alpha1ParallelSteps([ - IoArgoprojWorkflowV1alpha1WorkflowStep( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - 
), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - continue_on=IoArgoprojWorkflowV1alpha1ContinueOn( - error=True, - failed=True, - ), - hooks={ - "key": IoArgoprojWorkflowV1alpha1LifecycleHook( - arguments=IoArgoprojWorkflowV1alpha1Arguments( - artifacts=[ - IoArgoprojWorkflowV1alpha1Artifact( - archive=IoArgoprojWorkflowV1alpha1ArchiveStrategy( - _none={}, - tar=IoArgoprojWorkflowV1alpha1TarStrategy( - compression_level=1, - ), - zip={}, - ), - archive_logs=True, - artifact_gc=IoArgoprojWorkflowV1alpha1ArtifactGC( - pod_metadata=IoArgoprojWorkflowV1alpha1Metadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - ), - 
service_account_name="service_account_name_example", - strategy="strategy_example", - ), - artifactory=IoArgoprojWorkflowV1alpha1ArtifactoryArtifact( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - url="url_example", - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - azure=IoArgoprojWorkflowV1alpha1AzureArtifact( - account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - blob="blob_example", - container="container_example", - endpoint="endpoint_example", - use_sdk_creds=True, - ), - deleted=True, - _from="_from_example", - from_expression="from_expression_example", - gcs=IoArgoprojWorkflowV1alpha1GCSArtifact( - bucket="bucket_example", - key="key_example", - service_account_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - git=IoArgoprojWorkflowV1alpha1GitArtifact( - branch="branch_example", - depth=1, - disable_submodules=True, - fetch=[ - "fetch_example", - ], - insecure_ignore_host_key=True, - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - repo="repo_example", - revision="revision_example", - single_branch=True, - ssh_private_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - global_name="global_name_example", - hdfs=IoArgoprojWorkflowV1alpha1HDFSArtifact( - addresses=[ - "addresses_example", - ], - force=True, - hdfs_user="hdfs_user_example", - krb_c_cache_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_config_config_map=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - krb_keytab_secret=SecretKeySelector( - key="key_example", - name="name_example", - 
optional=True, - ), - krb_realm="krb_realm_example", - krb_service_principal_name="krb_service_principal_name_example", - krb_username="krb_username_example", - path="path_example", - ), - http=IoArgoprojWorkflowV1alpha1HTTPArtifact( - auth=IoArgoprojWorkflowV1alpha1HTTPAuth( - basic_auth=IoArgoprojWorkflowV1alpha1BasicAuth( - password_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - username_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - client_cert=IoArgoprojWorkflowV1alpha1ClientCertAuth( - client_cert_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - oauth2=IoArgoprojWorkflowV1alpha1OAuth2Auth( - client_id_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - client_secret_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - endpoint_params=[ - IoArgoprojWorkflowV1alpha1OAuth2EndpointParam( - key="key_example", - value="value_example", - ), - ], - scopes=[ - "scopes_example", - ], - token_url_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - ), - headers=[ - IoArgoprojWorkflowV1alpha1Header( - name="name_example", - value="value_example", - ), - ], - url="url_example", - ), - mode=1, - name="name_example", - optional=True, - oss=IoArgoprojWorkflowV1alpha1OSSArtifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - create_bucket_if_not_present=True, - endpoint="endpoint_example", - key="key_example", - lifecycle_rule=IoArgoprojWorkflowV1alpha1OSSLifecycleRule( - mark_deletion_after_days=1, - mark_infrequent_access_after_days=1, - ), - secret_key_secret=SecretKeySelector( - key="key_example", - 
name="name_example", - optional=True, - ), - security_token="security_token_example", - use_sdk_creds=True, - ), - path="path_example", - raw=IoArgoprojWorkflowV1alpha1RawArtifact( - data="data_example", - ), - recurse_mode=True, - s3=IoArgoprojWorkflowV1alpha1S3Artifact( - access_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - bucket="bucket_example", - ca_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - create_bucket_if_not_present=IoArgoprojWorkflowV1alpha1CreateS3BucketOptions( - object_locking=True, - ), - encryption_options=IoArgoprojWorkflowV1alpha1S3EncryptionOptions( - enable_encryption=True, - kms_encryption_context="kms_encryption_context_example", - kms_key_id="kms_key_id_example", - server_side_customer_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - ), - endpoint="endpoint_example", - insecure=True, - key="key_example", - region="region_example", - role_arn="role_arn_example", - secret_key_secret=SecretKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - use_sdk_creds=True, - ), - sub_path="sub_path_example", - ), - ], - parameters=[ - IoArgoprojWorkflowV1alpha1Parameter( - default="default_example", - description="description_example", - enum=[ - "enum_example", - ], - global_name="global_name_example", - name="name_example", - value="value_example", - value_from=IoArgoprojWorkflowV1alpha1ValueFrom( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - default="default_example", - event="event_example", - expression="expression_example", - jq_filter="jq_filter_example", - json_path="json_path_example", - parameter="parameter_example", - path="path_example", - supplied={}, - ), - ), - ], - ), - expression="expression_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - 
cluster_scope=True, - name="name_example", - template="template_example", - ), - ), - }, - inline=IoArgoprojWorkflowV1alpha1Template(), - name="name_example", - on_exit="on_exit_example", - template="template_example", - template_ref=IoArgoprojWorkflowV1alpha1TemplateRef( - cluster_scope=True, - name="name_example", - template="template_example", - ), - when="when_example", - with_items=[ - {}, - ], - with_param="with_param_example", - with_sequence=IoArgoprojWorkflowV1alpha1Sequence( - count="count_example", - end="end_example", - format="format_example", - start="start_example", - ), - ), - ]), - ], - suspend=IoArgoprojWorkflowV1alpha1SuspendTemplate( - duration="duration_example", - ), - synchronization=IoArgoprojWorkflowV1alpha1Synchronization( - mutex=IoArgoprojWorkflowV1alpha1Mutex( - name="name_example", - namespace="namespace_example", - ), - semaphore=IoArgoprojWorkflowV1alpha1SemaphoreRef( - config_map_key_ref=ConfigMapKeySelector( - key="key_example", - name="name_example", - optional=True, - ), - namespace="namespace_example", - ), - ), - timeout="timeout_example", - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - 
user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - 
OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - 
host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - 
registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - ), - ], - tolerations=[ - Toleration( - effect="NoExecute", - key="key_example", - operator="Equal", - toleration_seconds=1, - value="value_example", - ), - ], - ttl_strategy=IoArgoprojWorkflowV1alpha1TTLStrategy( - seconds_after_completion=1, - seconds_after_failure=1, - seconds_after_success=1, - ), - volume_claim_gc=IoArgoprojWorkflowV1alpha1VolumeClaimGC( - strategy="strategy_example", - ), - volume_claim_templates=[ - PersistentVolumeClaim( - api_version="api_version_example", - kind="kind_example", - metadata=ObjectMeta( - annotations={ - "key": 
"key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - status=PersistentVolumeClaimStatus( - access_modes=[ - "access_modes_example", - ], - allocated_resources={ - "key": "key_example", - }, - capacity={ - "key": 
"key_example", - }, - conditions=[ - PersistentVolumeClaimCondition( - last_probe_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), - message="message_example", - reason="reason_example", - status="status_example", - type="FileSystemResizePending", - ), - ], - phase="Bound", - resize_status="resize_status_example", - ), - ), - ], - volumes=[ - Volume( - aws_elastic_block_store=AWSElasticBlockStoreVolumeSource( - fs_type="fs_type_example", - partition=1, - read_only=True, - volume_id="volume_id_example", - ), - azure_disk=AzureDiskVolumeSource( - caching_mode="caching_mode_example", - disk_name="disk_name_example", - disk_uri="disk_uri_example", - fs_type="fs_type_example", - kind="kind_example", - read_only=True, - ), - azure_file=AzureFileVolumeSource( - read_only=True, - secret_name="secret_name_example", - share_name="share_name_example", - ), - cephfs=CephFSVolumeSource( - monitors=[ - "monitors_example", - ], - path="path_example", - read_only=True, - secret_file="secret_file_example", - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - cinder=CinderVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_id="volume_id_example", - ), - config_map=ConfigMapVolumeSource( - default_mode=1, - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - csi=CSIVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - node_publish_secret_ref=LocalObjectReference( - name="name_example", - ), - read_only=True, - volume_attributes={ - "key": "key_example", - }, - ), - downward_api=DownwardAPIVolumeSource( - default_mode=1, - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - 
resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - empty_dir=EmptyDirVolumeSource( - medium="medium_example", - size_limit="size_limit_example", - ), - ephemeral=EphemeralVolumeSource( - volume_claim_template=PersistentVolumeClaimTemplate( - metadata=ObjectMeta( - annotations={ - "key": "key_example", - }, - cluster_name="cluster_name_example", - creation_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - deletion_grace_period_seconds=1, - deletion_timestamp=dateutil_parser('1970-01-01T00:00:00.00Z'), - finalizers=[ - "finalizers_example", - ], - generate_name="generate_name_example", - generation=1, - labels={ - "key": "key_example", - }, - managed_fields=[ - ManagedFieldsEntry( - api_version="api_version_example", - fields_type="fields_type_example", - fields_v1={}, - manager="manager_example", - operation="operation_example", - subresource="subresource_example", - time=dateutil_parser('1970-01-01T00:00:00.00Z'), - ), - ], - name="name_example", - namespace="namespace_example", - owner_references=[ - OwnerReference( - api_version="api_version_example", - block_owner_deletion=True, - controller=True, - kind="kind_example", - name="name_example", - uid="uid_example", - ), - ], - resource_version="resource_version_example", - self_link="self_link_example", - uid="uid_example", - ), - spec=PersistentVolumeClaimSpec( - access_modes=[ - "access_modes_example", - ], - data_source=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - data_source_ref=TypedLocalObjectReference( - api_group="api_group_example", - kind="kind_example", - name="name_example", - ), - resources=ResourceRequirements( - limits={ - "key": "key_example", - }, - requests={ - "key": "key_example", - }, - ), - selector=LabelSelector( - match_expressions=[ - LabelSelectorRequirement( - key="key_example", - 
operator="operator_example", - values=[ - "values_example", - ], - ), - ], - match_labels={ - "key": "key_example", - }, - ), - storage_class_name="storage_class_name_example", - volume_mode="volume_mode_example", - volume_name="volume_name_example", - ), - ), - ), - fc=FCVolumeSource( - fs_type="fs_type_example", - lun=1, - read_only=True, - target_wwns=[ - "target_wwns_example", - ], - wwids=[ - "wwids_example", - ], - ), - flex_volume=FlexVolumeSource( - driver="driver_example", - fs_type="fs_type_example", - options={ - "key": "key_example", - }, - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ), - flocker=FlockerVolumeSource( - dataset_name="dataset_name_example", - dataset_uuid="dataset_uuid_example", - ), - gce_persistent_disk=GCEPersistentDiskVolumeSource( - fs_type="fs_type_example", - partition=1, - pd_name="pd_name_example", - read_only=True, - ), - git_repo=GitRepoVolumeSource( - directory="directory_example", - repository="repository_example", - revision="revision_example", - ), - glusterfs=GlusterfsVolumeSource( - endpoints="endpoints_example", - path="path_example", - read_only=True, - ), - host_path=HostPathVolumeSource( - path="path_example", - type="type_example", - ), - iscsi=ISCSIVolumeSource( - chap_auth_discovery=True, - chap_auth_session=True, - fs_type="fs_type_example", - initiator_name="initiator_name_example", - iqn="iqn_example", - iscsi_interface="iscsi_interface_example", - lun=1, - portals=[ - "portals_example", - ], - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - target_portal="target_portal_example", - ), - name="name_example", - nfs=NFSVolumeSource( - path="path_example", - read_only=True, - server="server_example", - ), - persistent_volume_claim=PersistentVolumeClaimVolumeSource( - claim_name="claim_name_example", - read_only=True, - ), - photon_persistent_disk=PhotonPersistentDiskVolumeSource( - fs_type="fs_type_example", - pd_id="pd_id_example", - ), - 
portworx_volume=PortworxVolumeSource( - fs_type="fs_type_example", - read_only=True, - volume_id="volume_id_example", - ), - projected=ProjectedVolumeSource( - default_mode=1, - sources=[ - VolumeProjection( - config_map=ConfigMapProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - downward_api=DownwardAPIProjection( - items=[ - DownwardAPIVolumeFile( - field_ref=ObjectFieldSelector( - api_version="api_version_example", - field_path="field_path_example", - ), - mode=1, - path="path_example", - resource_field_ref=ResourceFieldSelector( - container_name="container_name_example", - divisor="divisor_example", - resource="resource_example", - ), - ), - ], - ), - secret=SecretProjection( - items=[ - KeyToPath( - key="key_example", - mode=1, - path="path_example", - ), - ], - name="name_example", - optional=True, - ), - service_account_token=ServiceAccountTokenProjection( - audience="audience_example", - expiration_seconds=1, - path="path_example", - ), - ), - ], - ), - quobyte=QuobyteVolumeSource( - group="group_example", - read_only=True, - registry="registry_example", - tenant="tenant_example", - user="user_example", - volume="volume_example", - ), - rbd=RBDVolumeSource( - fs_type="fs_type_example", - image="image_example", - keyring="keyring_example", - monitors=[ - "monitors_example", - ], - pool="pool_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - user="user_example", - ), - scale_io=ScaleIOVolumeSource( - fs_type="fs_type_example", - gateway="gateway_example", - protection_domain="protection_domain_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - ssl_enabled=True, - storage_mode="storage_mode_example", - storage_pool="storage_pool_example", - system="system_example", - volume_name="volume_name_example", - ), - secret=SecretVolumeSource( - default_mode=1, - items=[ - KeyToPath( - 
key="key_example", - mode=1, - path="path_example", - ), - ], - optional=True, - secret_name="secret_name_example", - ), - storageos=StorageOSVolumeSource( - fs_type="fs_type_example", - read_only=True, - secret_ref=LocalObjectReference( - name="name_example", - ), - volume_name="volume_name_example", - volume_namespace="volume_namespace_example", - ), - vsphere_volume=VsphereVirtualDiskVolumeSource( - fs_type="fs_type_example", - storage_policy_id="storage_policy_id_example", - storage_policy_name="storage_policy_name_example", - volume_path="volume_path_example", - ), - ), - ], - workflow_metadata=IoArgoprojWorkflowV1alpha1WorkflowMetadata( - annotations={ - "key": "key_example", - }, - labels={ - "key": "key_example", - }, - labels_from={ - "key": IoArgoprojWorkflowV1alpha1LabelValueFrom( - expression="expression_example", - ), - }, - ), - workflow_template_ref=IoArgoprojWorkflowV1alpha1WorkflowTemplateRef( - cluster_scope=True, - name="name_example", - ), - ), - ), - ) # IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest | + api_instance = argo_workflows.WorkflowTemplateServiceApi(api_client) + namespace = 'namespace_example' # str | + name = 'name_example' # str | DEPRECATED: This field is ignored. + body = argo_workflows.IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest() # IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest | - # example passing only required values which don't have defaults set try: api_response = api_instance.update_workflow_template(namespace, name, body) + print("The response of WorkflowTemplateServiceApi->update_workflow_template:\n") pprint(api_response) - except argo_workflows.ApiException as e: + except Exception as e: print("Exception when calling WorkflowTemplateServiceApi->update_workflow_template: %s\n" % e) ``` + ### Parameters + Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | - **name** | **str**| DEPRECATED: This field is ignored. 
| - **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest**](IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.md)| | + **namespace** | **str**| | + **name** | **str**| DEPRECATED: This field is ignored. | + **body** | [**IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest**](IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest.md)| | ### Return type @@ -32548,7 +513,6 @@ Name | Type | Description | Notes - **Content-Type**: application/json - **Accept**: application/json - ### HTTP response details | Status code | Description | Response headers | diff --git a/sdks/python/client/requirements.txt b/sdks/python/client/requirements.txt index 96947f60408f..cc85509ec516 100644 --- a/sdks/python/client/requirements.txt +++ b/sdks/python/client/requirements.txt @@ -1,3 +1,5 @@ python_dateutil >= 2.5.3 setuptools >= 21.0.0 -urllib3 >= 1.25.3 +urllib3 >= 1.25.3, < 2.1.0 +pydantic >= 2 +typing-extensions >= 4.7.1 diff --git a/sdks/python/client/setup.py b/sdks/python/client/setup.py index ab2303a69c39..c8742ade961a 100644 --- a/sdks/python/client/setup.py +++ b/sdks/python/client/setup.py @@ -1,27 +1,33 @@ +# coding: utf-8 + """ Argo Workflows API - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 from setuptools import setup, find_packages # noqa: H301 -NAME = "argo-workflows" -VERSION = "0.0.0-pre" # To install the library, run the following # # python setup.py install # # prerequisite: setuptools # http://pypi.python.org/pypi/setuptools - +NAME = "argo-workflows" +VERSION = "0.0.0-pre" +PYTHON_REQUIRES = ">=3.7" REQUIRES = [ - "urllib3 >= 1.25.3", - "python-dateutil", + "urllib3 >= 1.25.3, < 2.1.0", + "python-dateutil", + "pydantic >= 2", + "typing-extensions >= 4.7.1", ] setup( @@ -32,11 +38,12 @@ author_email="team@openapitools.org", url="", keywords=["OpenAPI", "OpenAPI-Generator", "Argo Workflows API"], - python_requires=">=3.6", install_requires=REQUIRES, packages=find_packages(exclude=["test", "tests"]), include_package_data=True, + long_description_content_type='text/markdown', long_description="""\ - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - """ + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ + """, # noqa: E501 + package_data={"argo_workflows": ["py.typed"]}, ) diff --git a/sdks/python/flake.nix b/sdks/python/flake.nix index 545fb34e430e..8e3c7e962bfa 100755 --- a/sdks/python/flake.nix +++ b/sdks/python/flake.nix @@ -13,9 +13,9 @@ perSystem = { pkgs, lib, config, ... 
}: let - openapi_generator_cli_5_4_0 = pkgs.openapi-generator-cli.overrideAttrs (oldAttrs: rec { + openapi_generator_cli_7_4_0 = pkgs.openapi-generator-cli.overrideAttrs (oldAttrs: rec { pname = "openapi-generator-cli"; - version = "5.4.0"; # update this when updating sdk Makefile + version = "7.4.0"; # update this when updating sdk Makefile jarfilename = "${pname}-${version}.jar"; src = pkgs.fetchurl { url = "mirror://maven/org/openapitools/${pname}/${version}/${jarfilename}"; @@ -54,7 +54,7 @@ pname = "argo-client-python-${version}"; nativeBuildInputs = [ - openapi_generator_cli_5_4_0 + openapi_generator_cli_7_4_0 pkgs.gnused pythonEnv ]; @@ -85,7 +85,7 @@ devShells = { default = pkgs.mkShell { packages = with pkgs; [ - openapi_generator_cli_5_4_0 + openapi_generator_cli_7_4_0 openjdk8-bootstrap gnused ];